diff --git a/api/.env.example b/api/.env.example index 71f0e5db8f..aa155003ab 100644 --- a/api/.env.example +++ b/api/.env.example @@ -20,6 +20,9 @@ FILES_URL=http://127.0.0.1:5001 # The time in seconds after the signature is rejected FILES_ACCESS_TIMEOUT=300 +# Access token expiration time in minutes +ACCESS_TOKEN_EXPIRE_MINUTES=60 + # celery configuration CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1 @@ -39,7 +42,7 @@ DB_DATABASE=dify # Storage configuration # use for store upload files, private keys... -# storage type: local, s3, azure-blob, google-storage, tencent-cos, huawei-obs, volcengine-tos, baidu-obs +# storage type: local, s3, azure-blob, google-storage, tencent-cos, huawei-obs, volcengine-tos, baidu-obs, supabase STORAGE_TYPE=local STORAGE_LOCAL_PATH=storage S3_USE_AWS_MANAGED_IAM=false @@ -99,11 +102,16 @@ VOLCENGINE_TOS_ACCESS_KEY=your-access-key VOLCENGINE_TOS_SECRET_KEY=your-secret-key VOLCENGINE_TOS_REGION=your-region +# Supabase Storage Configuration +SUPABASE_BUCKET_NAME=your-bucket-name +SUPABASE_API_KEY=your-api-key +SUPABASE_URL=your-server-url + # CORS configuration WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* -# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector +# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector, baidu, vikingdb VECTOR_STORE=weaviate # Weaviate configuration @@ -203,6 +211,24 @@ OPENSEARCH_USER=admin OPENSEARCH_PASSWORD=admin OPENSEARCH_SECURE=true +# Baidu configuration +BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287 +BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000 +BAIDU_VECTOR_DB_ACCOUNT=root +BAIDU_VECTOR_DB_API_KEY=dify +BAIDU_VECTOR_DB_DATABASE=dify +BAIDU_VECTOR_DB_SHARD=1 +BAIDU_VECTOR_DB_REPLICAS=3 + +# VikingDB configuration +VIKINGDB_ACCESS_KEY=your-ak +VIKINGDB_SECRET_KEY=your-sk +VIKINGDB_REGION=cn-shanghai +VIKINGDB_HOST=api-vikingdb.xxx.volces.com +VIKINGDB_SCHEME=http +VIKINGDB_CONNECTION_TIMEOUT=30 +VIKINGDB_SOCKET_TIMEOUT=30 + # Upload configuration UPLOAD_FILE_SIZE_LIMIT=15 UPLOAD_FILE_BATCH_LIMIT=5 diff --git a/api/app.py b/api/app.py index a251ef5f0f..52dd492225 100644 --- a/api/app.py +++ b/api/app.py @@ -183,7 +183,7 @@ def load_user_from_request(request_from_flask_login): decoded = PassportService().verify(auth_token) user_id = decoded.get("user_id") - logged_in_account = AccountService.load_logged_in_account(account_id=user_id, token=auth_token) + logged_in_account = AccountService.load_logged_in_account(account_id=user_id) if logged_in_account: contexts.tenant_id.set(logged_in_account.current_tenant_id) return logged_in_account diff --git a/api/commands.py b/api/commands.py index 7ef4aed7f7..dbcd8a744d 100644 --- a/api/commands.py +++ b/api/commands.py @@ -347,6 +347,14 @@ def migrate_knowledge_vector_database(): index_name = Dataset.gen_collection_name_by_id(dataset_id) index_struct_dict = {"type": "elasticsearch", "vector_store": {"class_prefix": index_name}} dataset.index_struct = json.dumps(index_struct_dict) + elif vector_type == VectorType.BAIDU: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + index_struct_dict = { + "type": VectorType.BAIDU, + "vector_store": {"class_prefix": collection_name}, + } + dataset.index_struct = json.dumps(index_struct_dict) else: raise ValueError(f"Vector store {vector_type} is not supported.")
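The token changes above pair the new ACCESS_TOKEN_EXPIRE_MINUTES setting with load_user_from_request no longer passing the raw token to AccountService.load_logged_in_account: the expiry can live inside the signed token itself rather than in server-side per-token state. A minimal sketch of that idea, assuming PyJWT and a placeholder secret (illustrative only, not Dify's actual PassportService code):

import datetime

import jwt  # PyJWT (an assumption; not necessarily what PassportService uses)

SECRET_KEY = "change-me"  # placeholder signing secret
ACCESS_TOKEN_EXPIRE_MINUTES = 60  # mirrors the new .env setting

def issue_access_token(user_id: str) -> str:
    # Embedding "exp" in the signed payload means no server-side token
    # record is needed to enforce expiry.
    now = datetime.datetime.now(datetime.timezone.utc)
    payload = {
        "user_id": user_id,
        "iat": now,
        "exp": now + datetime.timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES),
    }
    return jwt.encode(payload, SECRET_KEY, algorithm="HS256")

def verify_access_token(token: str) -> dict:
    # jwt.decode checks "exp" automatically and raises
    # jwt.ExpiredSignatureError once the token has expired.
    return jwt.decode(token, SECRET_KEY, algorithms=["HS256"])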
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 93dbc1367f..a3334d1634 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -360,9 +360,9 @@ class WorkflowConfig(BaseSettings): ) -class OAuthConfig(BaseSettings): +class AuthConfig(BaseSettings): """ - Configuration for OAuth authentication + Configuration for authentication and OAuth """ OAUTH_REDIRECT_PATH: str = Field( @@ -371,7 +371,7 @@ class OAuthConfig(BaseSettings): ) GITHUB_CLIENT_ID: Optional[str] = Field( - description="GitHub OAuth client secret", + description="GitHub OAuth client ID", default=None, ) @@ -390,6 +390,11 @@ class OAuthConfig(BaseSettings): default=None, ) + ACCESS_TOKEN_EXPIRE_MINUTES: PositiveInt = Field( + description="Expiration time for access tokens in minutes", + default=60, + ) + class ModerationConfig(BaseSettings): """ @@ -607,6 +612,7 @@ class PositionConfig(BaseSettings): class FeatureConfig( # place the configs in alphabet order AppExecutionConfig, + AuthConfig, # Changed from OAuthConfig to AuthConfig BillingConfig, CodeExecutionSandboxConfig, DataSetConfig, @@ -621,14 +627,13 @@ class FeatureConfig( MailConfig, ModelLoadBalanceConfig, ModerationConfig, - OAuthConfig, + PositionConfig, RagEtlConfig, SecurityConfig, ToolConfig, UpdateConfig, WorkflowConfig, WorkspaceConfig, - PositionConfig, # hosted services config HostedServiceConfig, CeleryBeatConfig, diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 8626236856..fa7f41d630 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -12,6 +12,7 @@ from configs.middleware.storage.baidu_obs_storage_config import BaiduOBSStorageC from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig from configs.middleware.storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig from configs.middleware.storage.oci_storage_config import OCIStorageConfig +from configs.middleware.storage.supabase_storage_config import SupabaseStorageConfig from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig @@ -27,6 +28,7 @@ from configs.middleware.vdb.qdrant_config import QdrantConfig from configs.middleware.vdb.relyt_config import RelytConfig from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig +from configs.middleware.vdb.vikingdb_config import VikingDBConfig from configs.middleware.vdb.weaviate_config import WeaviateConfig @@ -191,6 +193,22 @@ class CeleryConfig(DatabaseConfig): return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False +class InternalTestConfig(BaseSettings): + """ + Configuration settings for Internal Test + """ + + AWS_SECRET_ACCESS_KEY: Optional[str] = Field( + description="Internal test AWS secret access key", + default=None, + ) + + AWS_ACCESS_KEY_ID: Optional[str] = Field( + description="Internal test AWS access key ID", + default=None, + ) + + class MiddlewareConfig( # place the configs in alphabet order CeleryConfig, @@ -206,6 +224,7 @@ class MiddlewareConfig( HuaweiCloudOBSStorageConfig, OCIStorageConfig, S3StorageConfig, + SupabaseStorageConfig, TencentCloudCOSStorageConfig, VolcengineTOSStorageConfig, # configs of vdb and vdb providers 
@@ -224,5 +243,7 @@ class MiddlewareConfig( TiDBVectorConfig, WeaviateConfig, ElasticsearchConfig, + InternalTestConfig, + VikingDBConfig, ): pass diff --git a/api/configs/middleware/storage/supabase_storage_config.py b/api/configs/middleware/storage/supabase_storage_config.py new file mode 100644 index 0000000000..a3e905b21c --- /dev/null +++ b/api/configs/middleware/storage/supabase_storage_config.py @@ -0,0 +1,24 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class SupabaseStorageConfig(BaseModel): + """ + Configuration settings for Supabase Object Storage Service + """ + + SUPABASE_BUCKET_NAME: Optional[str] = Field( + description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')", + default=None, + ) + + SUPABASE_API_KEY: Optional[str] = Field( + description="API key for authenticating with Supabase", + default=None, + ) + + SUPABASE_URL: Optional[str] = Field( + description="URL of the Supabase project", + default=None, + ) diff --git a/api/configs/middleware/vdb/baidu_vector_config.py b/api/configs/middleware/vdb/baidu_vector_config.py new file mode 100644 index 0000000000..44742c2e2f --- /dev/null +++ b/api/configs/middleware/vdb/baidu_vector_config.py @@ -0,0 +1,45 @@ +from typing import Optional + +from pydantic import Field, NonNegativeInt, PositiveInt +from pydantic_settings import BaseSettings + + +class BaiduVectorDBConfig(BaseSettings): + """ + Configuration settings for Baidu Vector Database + """ + + BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field( + description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')", + default=None, + ) + + BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: PositiveInt = Field( + description="Timeout in milliseconds for Baidu Vector Database operations (default is 30000 milliseconds)", + default=30000, + ) + + BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field( + description="Account for authenticating with the Baidu Vector Database", + default=None, + ) + + BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field( + description="API key for authenticating with the Baidu Vector Database service", + default=None, + ) + + BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field( + description="Name of the specific Baidu Vector Database to connect to", + default=None, + ) + + BAIDU_VECTOR_DB_SHARD: PositiveInt = Field( + description="Number of shards for the Baidu Vector Database (default is 1)", + default=1, + ) + + BAIDU_VECTOR_DB_REPLICAS: NonNegativeInt = Field( + description="Number of replicas for the Baidu Vector Database (default is 3)", + default=3, + ) diff --git a/api/configs/middleware/vdb/vikingdb_config.py b/api/configs/middleware/vdb/vikingdb_config.py new file mode 100644 index 0000000000..5ad98d898a --- /dev/null +++ b/api/configs/middleware/vdb/vikingdb_config.py @@ -0,0 +1,37 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class VikingDBConfig(BaseModel): + """ + Configuration for connecting to Volcengine VikingDB. + Refer to the following documentation for details on obtaining credentials: + https://www.volcengine.com/docs/6291/65568 + """ + + VIKINGDB_ACCESS_KEY: Optional[str] = Field( + default=None, description="The Access Key provided by Volcengine VikingDB for API authentication." + ) + VIKINGDB_SECRET_KEY: Optional[str] = Field( + default=None, description="The Secret Key provided by Volcengine VikingDB for API authentication."
+ ) + VIKINGDB_REGION: Optional[str] = Field( + default="cn-shanghai", + description="The region of the Volcengine VikingDB service (e.g., 'cn-shanghai', 'cn-beijing').", + ) + VIKINGDB_HOST: Optional[str] = Field( + default="api-vikingdb.mlp.cn-shanghai.volces.com", + description="The host of the Volcengine VikingDB service (e.g., 'api-vikingdb.volces.com', \ 'api-vikingdb.mlp.cn-shanghai.volces.com').", + ) + VIKINGDB_SCHEME: Optional[str] = Field( + default="http", + description="The scheme of the Volcengine VikingDB service (e.g., 'http', 'https').", + ) + VIKINGDB_CONNECTION_TIMEOUT: Optional[int] = Field( + default=30, description="The connection timeout of the Volcengine VikingDB service." + ) + VIKINGDB_SOCKET_TIMEOUT: Optional[int] = Field( + default=30, description="The socket timeout of the Volcengine VikingDB service." + ) diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 62837af2b9..18a7b23166 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -7,7 +7,7 @@ from flask_restful import Resource, reqparse import services from controllers.console import api from controllers.console.setup import setup_required -from libs.helper import email, get_remote_ip +from libs.helper import email, extract_remote_ip from libs.password import valid_password from models.account import Account from services.account_service import AccountService, TenantService @@ -40,17 +40,16 @@ class LoginApi(Resource): "data": "workspace not found, please contact system admin to invite you to join in a workspace", } - token = AccountService.login(account, ip_address=get_remote_ip(request)) + token_pair = AccountService.login(account=account, ip_address=extract_remote_ip(request)) - return {"result": "success", "data": token} + return {"result": "success", "data": token_pair.model_dump()} class LogoutApi(Resource): @setup_required def get(self): account = cast(Account, flask_login.current_user) - token = request.headers.get("Authorization", "").split(" ")[1] - AccountService.logout(account=account, token=token) + AccountService.logout(account=account) flask_login.logout_user() return {"result": "success"} @@ -106,5 +105,19 @@ class ResetPasswordApi(Resource): return {"result": "success"} +class RefreshTokenApi(Resource): + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("refresh_token", type=str, required=True, location="json") + args = parser.parse_args() + + try: + new_token_pair = AccountService.refresh_token(args["refresh_token"]) + return {"result": "success", "data": new_token_pair.model_dump()} + except Exception as e: + return {"result": "fail", "data": str(e)}, 401 + + api.add_resource(LoginApi, "/login") api.add_resource(LogoutApi, "/logout") +api.add_resource(RefreshTokenApi, "/refresh-token") diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index ad0c0580ae..c5909b8c10 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -9,7 +9,7 @@ from flask_restful import Resource from configs import dify_config from constants.languages import languages from extensions.ext_database import db -from libs.helper import get_remote_ip +from libs.helper import extract_remote_ip from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo from models.account import Account, AccountStatus from services.account_service import AccountService, RegisterService, TenantService @@ -81,9 +81,14 @@ class 
OAuthCallback(Resource): TenantService.create_owner_tenant_if_not_exist(account) - token = AccountService.login(account, ip_address=get_remote_ip(request)) + token_pair = AccountService.login( + account=account, + ip_address=extract_remote_ip(request), + ) - return redirect(f"{dify_config.CONSOLE_WEB_URL}?console_token={token}") + return redirect( + f"{dify_config.CONSOLE_WEB_URL}?access_token={token_pair.access_token}&refresh_token={token_pair.refresh_token}" + ) def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]: diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 9561fd8b70..6583356d23 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -617,6 +617,8 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.CHROMA | VectorType.TENCENT | VectorType.PGVECTO_RS + | VectorType.BAIDU + | VectorType.VIKINGDB ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( @@ -653,6 +655,8 @@ class DatasetRetrievalSettingMockApi(Resource): | VectorType.CHROMA | VectorType.TENCENT | VectorType.PGVECTO_RS + | VectorType.BAIDU + | VectorType.VIKINGDB ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index dcef979360..2dc054cfbd 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -13,6 +13,7 @@ from libs.login import login_required from services.dataset_service import DatasetService from services.external_knowledge_service import ExternalDatasetService from services.hit_testing_service import HitTestingService +from services.knowledge_service import ExternalDatasetTestService def _validate_name(name): @@ -232,8 +233,31 @@ class ExternalKnowledgeHitTestingApi(Resource): raise InternalServerError(str(e)) +class BedrockRetrievalApi(Resource): + # this api is only for internal testing + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json") + parser.add_argument( + "query", + nullable=False, + required=True, + type=str, + ) + parser.add_argument("knowledge_id", nullable=False, required=True, type=str) + args = parser.parse_args() + + # Call the knowledge retrieval service + result = ExternalDatasetTestService.knowledge_retrieval( + args["retrieval_setting"], args["query"], args["knowledge_id"] + ) + return result, 200 + + api.add_resource(ExternalKnowledgeHitTestingApi, "/datasets/<uuid:dataset_id>/external-hit-testing") api.add_resource(ExternalDatasetCreateApi, "/datasets/external") api.add_resource(ExternalApiTemplateListApi, "/datasets/external-knowledge-api") api.add_resource(ExternalApiTemplateApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>") api.add_resource(ExternalApiUseCheckApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check") +# this api is only for internal testing +api.add_resource(BedrockRetrievalApi, "/test/retrieval") diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py index 46b4ef5d87..15a4af118b 100644 --- a/api/controllers/console/setup.py +++ b/api/controllers/console/setup.py @@ -4,7 +4,7 @@ from flask import request from flask_restful import Resource, reqparse from configs import dify_config -from libs.helper import StrLen, email, get_remote_ip +from libs.helper import StrLen, email, extract_remote_ip from 
libs.password import valid_password from models.model import DifySetup from services.account_service import RegisterService, TenantService @@ -46,7 +46,7 @@ class SetupApi(Resource): # setup RegisterService.setup( - email=args["email"], name=args["name"], password=args["password"], ip_address=get_remote_ip(request) + email=args["email"], name=args["name"], password=args["password"], ip_address=extract_remote_ip(request) ) return {"result": "success"}, 201 diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index fe0bcf7338..9e8a53bbfb 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -126,13 +126,12 @@ class ModelProviderIconApi(Resource): Get model provider icon """ - @setup_required - @login_required - @account_initialization_required def get(self, provider: str, icon_type: str, lang: str): model_provider_service = ModelProviderService() icon, mimetype = model_provider_service.get_model_provider_icon( - provider=provider, icon_type=icon_type, lang=lang + provider=provider, + icon_type=icon_type, + lang=lang, ) return send_file(io.BytesIO(icon), mimetype=mimetype) diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 6bf684f8e4..fd63c7787f 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -56,6 +56,7 @@ from models.account import Account from models.model import Conversation, EndUser, Message from models.workflow import ( Workflow, + WorkflowNodeExecution, WorkflowRunStatus, ) @@ -72,6 +73,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc _workflow: Workflow _user: Union[Account, EndUser] _workflow_system_variables: dict[SystemVariableKey, Any] + _wip_workflow_node_executions: dict[str, WorkflowNodeExecution] def __init__( self, @@ -115,6 +117,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc } self._task_state = WorkflowTaskState() + self._wip_workflow_node_executions = {} self._conversation_name_generate_thread = None diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 3afc505367..7c53556e43 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -52,6 +52,7 @@ from models.workflow import ( Workflow, WorkflowAppLog, WorkflowAppLogCreatedFrom, + WorkflowNodeExecution, WorkflowRun, WorkflowRunStatus, ) @@ -69,6 +70,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa _task_state: WorkflowTaskState _application_generate_entity: WorkflowAppGenerateEntity _workflow_system_variables: dict[SystemVariableKey, Any] + _wip_workflow_node_executions: dict[str, WorkflowNodeExecution] def __init__( self, @@ -103,6 +105,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa } self._task_state = WorkflowTaskState() + self._wip_workflow_node_executions = {} def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: """ diff --git a/api/core/app/task_pipeline/message_cycle_manage.py b/api/core/app/task_pipeline/message_cycle_manage.py index 3a1d1b227f..236eebf0b8 100644 --- a/api/core/app/task_pipeline/message_cycle_manage.py +++ 
b/api/core/app/task_pipeline/message_cycle_manage.py @@ -1,8 +1,10 @@ +import logging from threading import Thread from typing import Optional, Union from flask import Flask, current_app +from configs import dify_config from core.app.entities.app_invoke_entities import ( AdvancedChatAppGenerateEntity, AgentChatAppGenerateEntity, @@ -83,7 +85,9 @@ class MessageCycleManage: name = LLMGenerator.generate_conversation_name(app_model.tenant_id, query) conversation.name = name except Exception as e: - logging.exception(f"generate conversation name failed: {e}") + if dify_config.DEBUG: + logging.exception(f"generate conversation name failed: {e}") + pass db.session.merge(conversation) db.session.commit() diff --git a/api/core/app/task_pipeline/workflow_cycle_manage.py b/api/core/app/task_pipeline/workflow_cycle_manage.py index 4fc587db77..b8f5ac2603 100644 --- a/api/core/app/task_pipeline/workflow_cycle_manage.py +++ b/api/core/app/task_pipeline/workflow_cycle_manage.py @@ -57,6 +57,7 @@ class WorkflowCycleManage: _user: Union[Account, EndUser] _task_state: WorkflowTaskState _workflow_system_variables: dict[SystemVariableKey, Any] + _wip_workflow_node_executions: dict[str, WorkflowNodeExecution] def _handle_workflow_run_start(self) -> WorkflowRun: max_sequence = ( @@ -251,6 +252,8 @@ class WorkflowCycleManage: db.session.refresh(workflow_node_execution) db.session.close() + self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution + return workflow_node_execution def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution: @@ -263,20 +266,36 @@ class WorkflowCycleManage: inputs = WorkflowEntry.handle_special_values(event.inputs) outputs = WorkflowEntry.handle_special_values(event.outputs) + execution_metadata = ( + json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None + ) + finished_at = datetime.now(timezone.utc).replace(tzinfo=None) + elapsed_time = (finished_at - event.start_at).total_seconds() + + db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update( + { + WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.SUCCEEDED.value, + WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None, + WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None, + WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None, + WorkflowNodeExecution.execution_metadata: execution_metadata, + WorkflowNodeExecution.finished_at: finished_at, + WorkflowNodeExecution.elapsed_time: elapsed_time, + } + ) + + db.session.commit() + db.session.close() workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value workflow_node_execution.inputs = json.dumps(inputs) if inputs else None workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None workflow_node_execution.outputs = json.dumps(outputs) if outputs else None - workflow_node_execution.execution_metadata = ( - json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None - ) - workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None) - workflow_node_execution.elapsed_time = (workflow_node_execution.finished_at - event.start_at).total_seconds() + workflow_node_execution.execution_metadata = execution_metadata + workflow_node_execution.finished_at = finished_at + workflow_node_execution.elapsed_time = 
elapsed_time - db.session.commit() - db.session.refresh(workflow_node_execution) - db.session.close() + self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id) return workflow_node_execution @@ -290,18 +309,33 @@ class WorkflowCycleManage: inputs = WorkflowEntry.handle_special_values(event.inputs) outputs = WorkflowEntry.handle_special_values(event.outputs) + finished_at = datetime.now(timezone.utc).replace(tzinfo=None) + elapsed_time = (finished_at - event.start_at).total_seconds() + + db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update( + { + WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.FAILED.value, + WorkflowNodeExecution.error: event.error, + WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None, + WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None, + WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None, + WorkflowNodeExecution.finished_at: finished_at, + WorkflowNodeExecution.elapsed_time: elapsed_time, + } + ) + + db.session.commit() + db.session.close() workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value workflow_node_execution.error = event.error - workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None) workflow_node_execution.inputs = json.dumps(inputs) if inputs else None workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None workflow_node_execution.outputs = json.dumps(outputs) if outputs else None - workflow_node_execution.elapsed_time = (workflow_node_execution.finished_at - event.start_at).total_seconds() + workflow_node_execution.finished_at = finished_at + workflow_node_execution.elapsed_time = elapsed_time - db.session.commit() - db.session.refresh(workflow_node_execution) - db.session.close() + self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id) return workflow_node_execution @@ -678,17 +712,7 @@ class WorkflowCycleManage: :param node_execution_id: workflow node execution id :return: """ - workflow_node_execution = ( - db.session.query(WorkflowNodeExecution) - .filter( - WorkflowNodeExecution.tenant_id == self._application_generate_entity.app_config.tenant_id, - WorkflowNodeExecution.app_id == self._application_generate_entity.app_config.app_id, - WorkflowNodeExecution.workflow_id == self._workflow.id, - WorkflowNodeExecution.triggered_from == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value, - WorkflowNodeExecution.node_execution_id == node_execution_id, - ) - .first() - ) + workflow_node_execution = self._wip_workflow_node_executions.get(node_execution_id) if not workflow_node_execution: raise Exception(f"Workflow node execution not found: {node_execution_id}") diff --git a/api/core/embedding/cached_embedding.py b/api/core/embedding/cached_embedding.py index 75219051cd..31d2171e72 100644 --- a/api/core/embedding/cached_embedding.py +++ b/api/core/embedding/cached_embedding.py @@ -5,6 +5,7 @@ from typing import Optional, cast import numpy as np from sqlalchemy.exc import IntegrityError +from configs import dify_config from core.embedding.embedding_constant import EmbeddingInputType from core.model_manager import ModelInstance from core.model_runtime.entities.model_entities import ModelPropertyKey @@ -110,6 +111,8 @@ class CacheEmbedding(Embeddings): embedding_results = embedding_result.embeddings[0] embedding_results = (embedding_results / 
np.linalg.norm(embedding_results)).tolist() except Exception as ex: + if dify_config.DEBUG: + logging.exception(f"Failed to embed query text: {ex}") raise ex try: @@ -122,6 +125,8 @@ class CacheEmbedding(Embeddings): encoded_str = encoded_vector.decode("utf-8") redis_client.setex(embedding_cache_key, 600, encoded_str) except Exception as ex: - logging.exception("Failed to add embedding to redis %s", ex) + if dify_config.DEBUG: + logging.exception("Failed to add embedding to redis %s", ex) + raise ex return embedding_results diff --git a/api/core/memory/token_buffer_memory.py b/api/core/memory/token_buffer_memory.py index 72da3b0c6f..bc94912c1e 100644 --- a/api/core/memory/token_buffer_memory.py +++ b/api/core/memory/token_buffer_memory.py @@ -60,8 +60,8 @@ class TokenBufferMemory: thread_messages = extract_thread_messages(messages) # for newly created message, its answer is temporarily empty, we don't need to add it to memory - if thread_messages and not thread_messages[-1].answer: - thread_messages.pop() + if thread_messages and not thread_messages[0].answer: + thread_messages.pop(0) messages = list(reversed(thread_messages)) diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py index c1868b6ad0..4f8f4e0f61 100644 --- a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py +++ b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py @@ -1,8 +1,18 @@ from collections.abc import Generator from typing import Optional, Union -from core.model_runtime.entities.llm_entities import LLMResult +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.llm_entities import LLMMode, LLMResult from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool +from core.model_runtime.entities.model_entities import ( + AIModelEntity, + FetchFrom, + ModelFeature, + ModelPropertyKey, + ModelType, + ParameterRule, + ParameterType, +) from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel @@ -29,3 +39,53 @@ class SiliconflowLargeLanguageModel(OAIAPICompatLargeLanguageModel): def _add_custom_parameters(cls, credentials: dict) -> None: credentials["mode"] = "chat" credentials["endpoint_url"] = "https://api.siliconflow.cn/v1" + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + return AIModelEntity( + model=model, + label=I18nObject(en_US=model, zh_Hans=model), + model_type=ModelType.LLM, + features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL, ModelFeature.STREAM_TOOL_CALL] + if credentials.get("function_calling_type") == "tool_call" + else [], + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ + ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 8000)), + ModelPropertyKey.MODE: LLMMode.CHAT.value, + }, + parameter_rules=[ + ParameterRule( + name="temperature", + use_template="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="max_tokens", + use_template="max_tokens", + default=512, + min=1, + max=int(credentials.get("max_tokens", 1024)), + label=I18nObject(en_US="Max Tokens", zh_Hans="最大标记"), + type=ParameterType.INT, + ), + ParameterRule( + name="top_p", + use_template="top_p", + label=I18nObject(en_US="Top P", zh_Hans="Top P"), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="top_k", + use_template="top_k", + 
label=I18nObject(en_US="Top K", zh_Hans="Top K"), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="frequency_penalty", + use_template="frequency_penalty", + label=I18nObject(en_US="Frequency Penalty", zh_Hans="重复惩罚"), + type=ParameterType.FLOAT, + ), + ], + ) diff --git a/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml b/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml index c46a891604..71f9a92381 100644 --- a/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml +++ b/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml @@ -20,6 +20,7 @@ supported_model_types: - speech2text configurate_methods: - predefined-model + - customizable-model provider_credential_schema: credential_form_schemas: - variable: api_key @@ -30,3 +31,57 @@ provider_credential_schema: placeholder: zh_Hans: 在此输入您的 API Key en_US: Enter your API Key +model_credential_schema: + model: + label: + en_US: Model Name + zh_Hans: 模型名称 + placeholder: + en_US: Enter your model name + zh_Hans: 输入模型名称 + credential_form_schemas: + - variable: api_key + label: + en_US: API Key + type: secret-input + required: true + placeholder: + zh_Hans: 在此输入您的 API Key + en_US: Enter your API Key + - variable: context_size + label: + zh_Hans: 模型上下文长度 + en_US: Model context size + required: true + type: text-input + default: '4096' + placeholder: + zh_Hans: 在此输入您的模型上下文长度 + en_US: Enter your Model context size + - variable: max_tokens + label: + zh_Hans: 最大 token 上限 + en_US: Upper bound for max tokens + default: '4096' + type: text-input + show_on: + - variable: __model_type + value: llm + - variable: function_calling_type + label: + en_US: Function calling + type: select + required: false + default: no_call + options: + - value: no_call + label: + en_US: Not Support + zh_Hans: 不支持 + - value: tool_call + label: + en_US: Support + zh_Hans: 支持 + show_on: + - variable: __model_type + value: llm diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/__init__.py b/api/core/model_runtime/model_providers/tongyi/rerank/__init__.py similarity index 100% rename from api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/__init__.py rename to api/core/model_runtime/model_providers/tongyi/rerank/__init__.py diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/_position.yaml b/api/core/model_runtime/model_providers/tongyi/rerank/_position.yaml new file mode 100644 index 0000000000..439afda992 --- /dev/null +++ b/api/core/model_runtime/model_providers/tongyi/rerank/_position.yaml @@ -0,0 +1 @@ +- gte-rerank diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/gte-rerank.yaml b/api/core/model_runtime/model_providers/tongyi/rerank/gte-rerank.yaml new file mode 100644 index 0000000000..44d51b9b0d --- /dev/null +++ b/api/core/model_runtime/model_providers/tongyi/rerank/gte-rerank.yaml @@ -0,0 +1,4 @@ +model: gte-rerank +model_type: rerank +model_properties: + context_size: 4000 diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py b/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py new file mode 100644 index 0000000000..c9245bd82d --- /dev/null +++ b/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py @@ -0,0 +1,136 @@ +from typing import Optional + +import dashscope +from dashscope.common.error import ( + AuthenticationError, + InvalidParameter, + RequestFailure, + ServiceUnavailableError, + UnsupportedHTTPMethod, + UnsupportedModel, +) + +from 
core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.rerank_model import RerankModel + + +class GTERerankModel(RerankModel): + """ + Model class for GTE rerank model. + """ + + def _invoke( + self, + model: str, + credentials: dict, + query: str, + docs: list[str], + score_threshold: Optional[float] = None, + top_n: Optional[int] = None, + user: Optional[str] = None, + ) -> RerankResult: + """ + Invoke rerank model + + :param model: model name + :param credentials: model credentials + :param query: search query + :param docs: docs for reranking + :param score_threshold: score threshold + :param top_n: top n + :param user: unique user id + :return: rerank result + """ + if len(docs) == 0: + return RerankResult(model=model, docs=docs) + + # initialize client + dashscope.api_key = credentials["dashscope_api_key"] + + response = dashscope.TextReRank.call( + query=query, + documents=docs, + model=model, + top_n=top_n, + return_documents=True, + ) + + rerank_documents = [] + for _, result in enumerate(response.output.results): + # format document + rerank_document = RerankDocument( + index=result.index, + score=result.relevance_score, + text=result["document"]["text"], + ) + + # score threshold check + if score_threshold is not None: + if result.relevance_score >= score_threshold: + rerank_documents.append(rerank_document) + else: + rerank_documents.append(rerank_document) + + return RerankResult(model=model, docs=rerank_documents) + + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + Validate model credentials + + :param model: model name + :param credentials: model credentials + :return: + """ + try: + self.invoke( + model=model, + credentials=credentials, + query="What is the capital of the United States?", + docs=[ + "Carson City is the capital city of the American state of Nevada. At the 2010 United States " + "Census, Carson City had a population of 55,274.", + "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " + "are a political division controlled by the United States. Its capital is Saipan.", + ], + score_threshold=0.8, + ) + except Exception as ex: + print(ex) + raise CredentialsValidateFailedError(str(ex)) + + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + """ + Map model invoke error to unified error + The key is the error type thrown to the caller + The value is the error type thrown by the model, + which needs to be converted into a unified error type for the caller. 
+ + :return: Invoke error mapping + """ + return { + InvokeConnectionError: [ + RequestFailure, + ], + InvokeServerUnavailableError: [ + ServiceUnavailableError, + ], + InvokeRateLimitError: [], + InvokeAuthorizationError: [ + AuthenticationError, + ], + InvokeBadRequestError: [ + InvalidParameter, + UnsupportedModel, + UnsupportedHTTPMethod, + ], + } diff --git a/api/core/model_runtime/model_providers/tongyi/tongyi.yaml b/api/core/model_runtime/model_providers/tongyi/tongyi.yaml index 1a09c20fd9..6349c22714 100644 --- a/api/core/model_runtime/model_providers/tongyi/tongyi.yaml +++ b/api/core/model_runtime/model_providers/tongyi/tongyi.yaml @@ -18,6 +18,7 @@ supported_model_types: - llm - tts - text-embedding + - rerank configurate_methods: - predefined-model - customizable-model diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py index e0c4980523..43bffad2a0 100644 --- a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py +++ b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py @@ -1,6 +1,10 @@ from collections.abc import Generator from typing import Optional, Union +from zhipuai import ZhipuAI +from zhipuai.types.chat.chat_completion import Completion +from zhipuai.types.chat.chat_completion_chunk import ChatCompletionChunk + from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, @@ -16,9 +20,6 @@ from core.model_runtime.entities.message_entities import ( from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.model_runtime.model_providers.zhipuai._common import _CommonZhipuaiAI -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk._client import ZhipuAI -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk.types.chat.chat_completion import Completion -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk.types.chat.chat_completion_chunk import ChatCompletionChunk from core.model_runtime.utils import helper GLM_JSON_MODE_PROMPT = """You should always follow the instructions and output a valid JSON object. 
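The import hunk above moves the zhipuai provider onto the published zhipuai package instead of the vendored zhipuai_sdk (the text-embedding module gets the same treatment just below, and the vendored copy is deleted after that). A minimal usage sketch of the official client the provider now depends on (API key and model name are placeholders):

from zhipuai import ZhipuAI

client = ZhipuAI(api_key="your-api-key")  # placeholder credential

response = client.chat.completions.create(
    model="glm-4",  # illustrative model name
    messages=[{"role": "user", "content": "Hello"}],
    stream=False,
)
print(response.choices[0].message.content)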
diff --git a/api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.py index 14a529dddf..5a34a3d593 100644 --- a/api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/zhipuai/text_embedding/text_embedding.py @@ -1,13 +1,14 @@ import time from typing import Optional +from zhipuai import ZhipuAI + from core.embedding.embedding_constant import EmbeddingInputType from core.model_runtime.entities.model_entities import PriceType from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel from core.model_runtime.model_providers.zhipuai._common import _CommonZhipuaiAI -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk._client import ZhipuAI class ZhipuAITextEmbeddingModel(_CommonZhipuaiAI, TextEmbeddingModel): diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__init__.py deleted file mode 100644 index fc71d64714..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .__version__ import __version__ -from ._client import ZhipuAI -from .core import ( - APIAuthenticationError, - APIConnectionError, - APIInternalError, - APIReachLimitError, - APIRequestFailedError, - APIResponseError, - APIResponseValidationError, - APIServerFlowExceedError, - APIStatusError, - APITimeoutError, - ZhipuAIError, -) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__version__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__version__.py deleted file mode 100644 index 51f8c49ecb..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/__version__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "v2.1.0" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py deleted file mode 100644 index 705d371e62..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py +++ /dev/null @@ -1,82 +0,0 @@ -from __future__ import annotations - -import os -from collections.abc import Mapping -from typing import Union - -import httpx -from httpx import Timeout -from typing_extensions import override - -from . 
import api_resource -from .core import NOT_GIVEN, ZHIPUAI_DEFAULT_MAX_RETRIES, HttpClient, NotGiven, ZhipuAIError, _jwt_token - - -class ZhipuAI(HttpClient): - chat: api_resource.chat.Chat - api_key: str - _disable_token_cache: bool = True - - def __init__( - self, - *, - api_key: str | None = None, - base_url: str | httpx.URL | None = None, - timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, - max_retries: int = ZHIPUAI_DEFAULT_MAX_RETRIES, - http_client: httpx.Client | None = None, - custom_headers: Mapping[str, str] | None = None, - disable_token_cache: bool = True, - _strict_response_validation: bool = False, - ) -> None: - if api_key is None: - api_key = os.environ.get("ZHIPUAI_API_KEY") - if api_key is None: - raise ZhipuAIError("未提供api_key,请通过参数或环境变量提供") - self.api_key = api_key - self._disable_token_cache = disable_token_cache - - if base_url is None: - base_url = os.environ.get("ZHIPUAI_BASE_URL") - if base_url is None: - base_url = "https://open.bigmodel.cn/api/paas/v4" - from .__version__ import __version__ - - super().__init__( - version=__version__, - base_url=base_url, - max_retries=max_retries, - timeout=timeout, - custom_httpx_client=http_client, - custom_headers=custom_headers, - _strict_response_validation=_strict_response_validation, - ) - self.chat = api_resource.chat.Chat(self) - self.images = api_resource.images.Images(self) - self.embeddings = api_resource.embeddings.Embeddings(self) - self.files = api_resource.files.Files(self) - self.fine_tuning = api_resource.fine_tuning.FineTuning(self) - self.batches = api_resource.Batches(self) - self.knowledge = api_resource.Knowledge(self) - self.tools = api_resource.Tools(self) - self.videos = api_resource.Videos(self) - self.assistant = api_resource.Assistant(self) - - @property - @override - def auth_headers(self) -> dict[str, str]: - api_key = self.api_key - if self._disable_token_cache: - return {"Authorization": f"Bearer {api_key}"} - else: - return {"Authorization": f"Bearer {_jwt_token.generate_token(api_key)}"} - - def __del__(self) -> None: - if not hasattr(self, "_has_custom_http_client") or not hasattr(self, "close") or not hasattr(self, "_client"): - # if the '__init__' method raised an error, self would not have client attr - return - - if self._has_custom_http_client: - return - - self.close() diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/__init__.py deleted file mode 100644 index 4fe0719dde..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -from .assistant import ( - Assistant, -) -from .batches import Batches -from .chat import ( - AsyncCompletions, - Chat, - Completions, -) -from .embeddings import Embeddings -from .files import Files, FilesWithRawResponse -from .fine_tuning import FineTuning -from .images import Images -from .knowledge import Knowledge -from .tools import Tools -from .videos import ( - Videos, -) - -__all__ = [ - "Videos", - "AsyncCompletions", - "Chat", - "Completions", - "Images", - "Embeddings", - "Files", - "FilesWithRawResponse", - "FineTuning", - "Batches", - "Knowledge", - "Tools", - "Assistant", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/__init__.py deleted file mode 100644 index ce619aa7f0..0000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .assistant import Assistant - -__all__ = ["Assistant"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py deleted file mode 100644 index c29b057498..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py +++ /dev/null @@ -1,122 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - StreamResponse, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.assistant import AssistantCompletion -from ...types.assistant.assistant_conversation_resp import ConversationUsageListResp -from ...types.assistant.assistant_support_resp import AssistantSupportResp - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -from ...types.assistant import assistant_conversation_params, assistant_create_params - -__all__ = ["Assistant"] - - -class Assistant(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def conversation( - self, - assistant_id: str, - model: str, - messages: list[assistant_create_params.ConversationMessage], - *, - stream: bool = True, - conversation_id: Optional[str] = None, - attachments: Optional[list[assistant_create_params.AssistantAttachments]] = None, - metadata: dict | None = None, - request_id: Optional[str] = None, - user_id: Optional[str] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> StreamResponse[AssistantCompletion]: - body = deepcopy_minimal( - { - "assistant_id": assistant_id, - "model": model, - "messages": messages, - "stream": stream, - "conversation_id": conversation_id, - "attachments": attachments, - "metadata": metadata, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/assistant", - body=maybe_transform(body, assistant_create_params.AssistantParameters), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=AssistantCompletion, - stream=stream or True, - stream_cls=StreamResponse[AssistantCompletion], - ) - - def query_support( - self, - *, - assistant_id_list: Optional[list[str]] = None, - request_id: Optional[str] = None, - user_id: Optional[str] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AssistantSupportResp: - body = deepcopy_minimal( - { - "assistant_id_list": assistant_id_list, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/assistant/list", - body=body, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=AssistantSupportResp, - ) - - def query_conversation_usage( - self, - assistant_id: str, - page: int = 1, - page_size: int = 10, - *, - request_id: Optional[str] = None, - user_id: Optional[str] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ConversationUsageListResp: - body = deepcopy_minimal( - { - "assistant_id": assistant_id, - 
"page": page, - "page_size": page_size, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/assistant/conversation/list", - body=maybe_transform(body, assistant_conversation_params.ConversationParameters), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=ConversationUsageListResp, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/batches.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/batches.py deleted file mode 100644 index ae2f2be85e..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/batches.py +++ /dev/null @@ -1,146 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Literal, Optional - -import httpx - -from ..core import NOT_GIVEN, BaseAPI, Body, Headers, NotGiven, make_request_options, maybe_transform -from ..core.pagination import SyncCursorPage -from ..types import batch_create_params, batch_list_params -from ..types.batch import Batch - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class Batches(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - completion_window: str | None = None, - endpoint: Literal["/v1/chat/completions", "/v1/embeddings"], - input_file_id: str, - metadata: Optional[dict[str, str]] | NotGiven = NOT_GIVEN, - auto_delete_input_file: bool = True, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Batch: - return self._post( - "/batches", - body=maybe_transform( - { - "completion_window": completion_window, - "endpoint": endpoint, - "input_file_id": input_file_id, - "metadata": metadata, - "auto_delete_input_file": auto_delete_input_file, - }, - batch_create_params.BatchCreateParams, - ), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Batch, - ) - - def retrieve( - self, - batch_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Batch: - """ - Retrieves a batch. - - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not batch_id: - raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") - return self._get( - f"/batches/{batch_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Batch, - ) - - def list( - self, - *, - after: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> SyncCursorPage[Batch]: - """List your organization's batches. - - Args: - after: A cursor for use in pagination. - - `after` is an object ID that defines your place - in the list. For instance, if you make a list request and receive 100 objects, - ending with obj_foo, your subsequent call can include after=obj_foo in order to - fetch the next page of the list. - - limit: A limit on the number of objects to be returned. Limit can range between 1 and - 100, and the default is 20. 
- - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._get_api_list( - "/batches", - page=SyncCursorPage[Batch], - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "after": after, - "limit": limit, - }, - batch_list_params.BatchListParams, - ), - ), - model=Batch, - ) - - def cancel( - self, - batch_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Batch: - """ - Cancels an in-progress batch. - - Args: - batch_id: The ID of the batch to cancel. - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - - """ - if not batch_id: - raise ValueError(f"Expected a non-empty value for `batch_id` but received {batch_id!r}") - return self._post( - f"/batches/{batch_id}/cancel", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Batch, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/__init__.py deleted file mode 100644 index 5cd8dc6f33..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .async_completions import AsyncCompletions -from .chat import Chat -from .completions import Completions - -__all__ = ["AsyncCompletions", "Chat", "Completions"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/async_completions.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/async_completions.py deleted file mode 100644 index 05510a3ec4..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/async_completions.py +++ /dev/null @@ -1,115 +0,0 @@ -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Literal, Optional, Union - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - drop_prefix_image_data, - make_request_options, - maybe_transform, -) -from ...types.chat.async_chat_completion import AsyncCompletion, AsyncTaskStatus -from ...types.chat.code_geex import code_geex_params -from ...types.sensitive_word_check import SensitiveWordCheckRequest - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from ..._client import ZhipuAI - - -class AsyncCompletions(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - model: str, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - user_id: Optional[str] | NotGiven = NOT_GIVEN, - do_sample: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - max_tokens: int | NotGiven = NOT_GIVEN, - seed: int | NotGiven = NOT_GIVEN, - messages: Union[str, list[str], list[int], list[list[int]], None], - stop: Optional[Union[str, list[str], None]] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - tools: 
Optional[object] | NotGiven = NOT_GIVEN, - tool_choice: str | NotGiven = NOT_GIVEN, - meta: Optional[dict[str, str]] | NotGiven = NOT_GIVEN, - extra: Optional[code_geex_params.CodeGeexExtra] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AsyncTaskStatus: - _cast_type = AsyncTaskStatus - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if temperature is not None and temperature != NOT_GIVEN: - if temperature <= 0: - do_sample = False - temperature = 0.01 - # logger.warning("temperature: the valid range is the open interval (0.0, 1.0); do_sample is rewritten to false (top_p and temperature take no effect)") # noqa: E501 - if temperature >= 1: - temperature = 0.99 - # logger.warning("temperature: the valid range is the open interval (0.0, 1.0)") - if top_p is not None and top_p != NOT_GIVEN: - if top_p >= 1: - top_p = 0.99 - # logger.warning("top_p: the valid range is the open interval (0.0, 1.0); it cannot equal 0 or 1") - if top_p <= 0: - top_p = 0.01 - # logger.warning("top_p: the valid range is the open interval (0.0, 1.0); it cannot equal 0 or 1") - - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if isinstance(messages, list): - for item in messages: - if item.get("content"): - item["content"] = drop_prefix_image_data(item["content"]) - - body = { - "model": model, - "request_id": request_id, - "user_id": user_id, - "temperature": temperature, - "top_p": top_p, - "do_sample": do_sample, - "max_tokens": max_tokens, - "seed": seed, - "messages": messages, - "stop": stop, - "sensitive_word_check": sensitive_word_check, - "tools": tools, - "tool_choice": tool_choice, - "meta": meta, - "extra": maybe_transform(extra, code_geex_params.CodeGeexExtra), - } - return self._post( - "/async/chat/completions", - body=body, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_cast_type, - stream=False, - ) - - def retrieve_completion_result( - self, - id: str, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Union[AsyncCompletion, AsyncTaskStatus]: - _cast_type = Union[AsyncCompletion, AsyncTaskStatus] - return self._get( - path=f"/async-result/{id}", - cast_type=_cast_type, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/chat.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/chat.py deleted file mode 100644 index b3cc46566c..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/chat.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import TYPE_CHECKING - -from ...core import BaseAPI, cached_property -from .async_completions import AsyncCompletions -from .completions import Completions - -if TYPE_CHECKING: - pass - - -class Chat(BaseAPI): - @cached_property - def completions(self) -> Completions: - return Completions(self._client) - - @cached_property - def asyncCompletions(self) -> AsyncCompletions: # noqa: N802 - return AsyncCompletions(self._client) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/completions.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/completions.py deleted file mode 100644 index 8e5bb454e6..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/chat/completions.py +++ /dev/null @@ -1,108 +0,0 @@ -from __future__ import
annotations - -import logging -from typing import TYPE_CHECKING, Literal, Optional, Union - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - StreamResponse, - deepcopy_minimal, - drop_prefix_image_data, - make_request_options, - maybe_transform, -) -from ...types.chat.chat_completion import Completion -from ...types.chat.chat_completion_chunk import ChatCompletionChunk -from ...types.chat.code_geex import code_geex_params -from ...types.sensitive_word_check import SensitiveWordCheckRequest - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from ..._client import ZhipuAI - - -class Completions(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - model: str, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - user_id: Optional[str] | NotGiven = NOT_GIVEN, - do_sample: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - max_tokens: int | NotGiven = NOT_GIVEN, - seed: int | NotGiven = NOT_GIVEN, - messages: Union[str, list[str], list[int], object, None], - stop: Optional[Union[str, list[str], None]] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - tools: Optional[object] | NotGiven = NOT_GIVEN, - tool_choice: str | NotGiven = NOT_GIVEN, - meta: Optional[dict[str, str]] | NotGiven = NOT_GIVEN, - extra: Optional[code_geex_params.CodeGeexExtra] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Completion | StreamResponse[ChatCompletionChunk]: - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if temperature is not None and temperature != NOT_GIVEN: - if temperature <= 0: - do_sample = False - temperature = 0.01 - # logger.warning("temperature: the valid range is the open interval (0.0, 1.0); do_sample is rewritten to false (top_p and temperature take no effect)") # noqa: E501 - if temperature >= 1: - temperature = 0.99 - # logger.warning("temperature: the valid range is the open interval (0.0, 1.0)") - if top_p is not None and top_p != NOT_GIVEN: - if top_p >= 1: - top_p = 0.99 - # logger.warning("top_p: the valid range is the open interval (0.0, 1.0); it cannot equal 0 or 1") - if top_p <= 0: - top_p = 0.01 - # logger.warning("top_p: the valid range is the open interval (0.0, 1.0); it cannot equal 0 or 1") - - logger.debug(f"temperature:{temperature}, top_p:{top_p}") - if isinstance(messages, list): - for item in messages: - if item.get("content"): - item["content"] = drop_prefix_image_data(item["content"]) - - body = deepcopy_minimal( - { - "model": model, - "request_id": request_id, - "user_id": user_id, - "temperature": temperature, - "top_p": top_p, - "do_sample": do_sample, - "max_tokens": max_tokens, - "seed": seed, - "messages": messages, - "stop": stop, - "sensitive_word_check": sensitive_word_check, - "stream": stream, - "tools": tools, - "tool_choice": tool_choice, - "meta": meta, - "extra": maybe_transform(extra, code_geex_params.CodeGeexExtra), - } - ) - return self._post( - "/chat/completions", - body=body, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=Completion, - stream=stream or False, - stream_cls=StreamResponse[ChatCompletionChunk], - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/embeddings.py
b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/embeddings.py deleted file mode 100644 index 4b4baef942..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/embeddings.py +++ /dev/null @@ -1,50 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional, Union - -import httpx - -from ..core import NOT_GIVEN, BaseAPI, Body, Headers, NotGiven, make_request_options -from ..types.embeddings import EmbeddingsResponded - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class Embeddings(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - input: Union[str, list[str], list[int], list[list[int]]], - model: Union[str], - dimensions: Union[int] | NotGiven = NOT_GIVEN, - encoding_format: str | NotGiven = NOT_GIVEN, - user: str | NotGiven = NOT_GIVEN, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[object] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - disable_strict_validation: Optional[bool] | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> EmbeddingsResponded: - _cast_type = EmbeddingsResponded - if disable_strict_validation: - _cast_type = object - return self._post( - "/embeddings", - body={ - "input": input, - "model": model, - "dimensions": dimensions, - "encoding_format": encoding_format, - "user": user, - "request_id": request_id, - "sensitive_word_check": sensitive_word_check, - }, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_cast_type, - stream=False, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py deleted file mode 100644 index c723f6f66e..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py +++ /dev/null @@ -1,194 +0,0 @@ -from __future__ import annotations - -from collections.abc import Mapping -from typing import TYPE_CHECKING, Literal, Optional, cast - -import httpx - -from ..core import ( - NOT_GIVEN, - BaseAPI, - Body, - FileTypes, - Headers, - NotGiven, - _legacy_binary_response, - _legacy_response, - deepcopy_minimal, - extract_files, - make_request_options, - maybe_transform, -) -from ..types.files import FileDeleted, FileObject, ListOfFileObject, UploadDetail, file_create_params - -if TYPE_CHECKING: - from .._client import ZhipuAI - -__all__ = ["Files", "FilesWithRawResponse"] - - -class Files(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - file: Optional[FileTypes] = None, - upload_detail: Optional[list[UploadDetail]] = None, - purpose: Literal["fine-tune", "retrieval", "batch"], - knowledge_id: Optional[str] = None, - sentence_size: Optional[int] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FileObject: - if not file and not upload_detail: - raise ValueError("At least one of `file` and `upload_detail` must be provided.") - body = deepcopy_minimal( - { - "file": file, - "upload_detail": upload_detail, - "purpose": purpose, - "knowledge_id": knowledge_id, - "sentence_size": sentence_size, - } - ) - files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) - if files: - 
# It should be noted that the actual Content-Type header that will be - # sent to the server will contain a `boundary` parameter, e.g. - # multipart/form-data; boundary=---abc-- - extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} - return self._post( - "/files", - body=maybe_transform(body, file_create_params.FileCreateParams), - files=files, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FileObject, - ) - - # def retrieve( - # self, - # file_id: str, - # *, - # extra_headers: Headers | None = None, - # extra_body: Body | None = None, - # timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - # ) -> FileObject: - # """ - # Returns information about a specific file. - # - # Args: - # file_id: The ID of the file to retrieve information about - # extra_headers: Send extra headers - # - # extra_body: Add additional JSON properties to the request - # - # timeout: Override the client-level default timeout for this request, in seconds - # """ - # if not file_id: - # raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") - # return self._get( - # f"/files/{file_id}", - # options=make_request_options( - # extra_headers=extra_headers, extra_body=extra_body, timeout=timeout - # ), - # cast_type=FileObject, - # ) - - def list( - self, - *, - purpose: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - after: str | NotGiven = NOT_GIVEN, - order: str | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ListOfFileObject: - return self._get( - "/files", - cast_type=ListOfFileObject, - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query={ - "purpose": purpose, - "limit": limit, - "after": after, - "order": order, - }, - ), - ) - - def delete( - self, - file_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FileDeleted: - """ - Delete a file. - - Args: - file_id: The ID of the file to delete - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not file_id: - raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") - return self._delete( - f"/files/{file_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FileDeleted, - ) - - def content( - self, - file_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> _legacy_response.HttpxBinaryResponseContent: - """ - Returns the contents of the specified file. 
- - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not file_id: - raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}") - extra_headers = {"Accept": "application/binary", **(extra_headers or {})} - return self._get( - f"/files/{file_id}/content", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_legacy_binary_response.HttpxBinaryResponseContent, - ) - - -class FilesWithRawResponse: - def __init__(self, files: Files) -> None: - self._files = files - - self.create = _legacy_response.to_raw_response_wrapper( - files.create, - ) - self.list = _legacy_response.to_raw_response_wrapper( - files.list, - ) - self.content = _legacy_response.to_raw_response_wrapper( - files.content, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/__init__.py deleted file mode 100644 index 7c309b8341..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .fine_tuning import FineTuning -from .jobs import Jobs -from .models import FineTunedModels - -__all__ = ["Jobs", "FineTunedModels", "FineTuning"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/fine_tuning.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/fine_tuning.py deleted file mode 100644 index 8670f7de00..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/fine_tuning.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import TYPE_CHECKING - -from ...core import BaseAPI, cached_property -from .jobs import Jobs -from .models import FineTunedModels - -if TYPE_CHECKING: - pass - - -class FineTuning(BaseAPI): - @cached_property - def jobs(self) -> Jobs: - return Jobs(self._client) - - @cached_property - def models(self) -> FineTunedModels: - return FineTunedModels(self._client) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/__init__.py deleted file mode 100644 index 40777a153f..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .jobs import Jobs - -__all__ = ["Jobs"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/jobs.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/jobs.py deleted file mode 100644 index 8b038cadc0..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/jobs/jobs.py +++ /dev/null @@ -1,152 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ....core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - make_request_options, -) -from ....types.fine_tuning import ( - FineTuningJob, - FineTuningJobEvent, - ListOfFineTuningJob, - job_create_params, -) - -if TYPE_CHECKING: - from ...._client import ZhipuAI - -__all__ = ["Jobs"] - - -class Jobs(BaseAPI): - def 
__init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - model: str, - training_file: str, - hyperparameters: job_create_params.Hyperparameters | NotGiven = NOT_GIVEN, - suffix: Optional[str] | NotGiven = NOT_GIVEN, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - validation_file: Optional[str] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - return self._post( - "/fine_tuning/jobs", - body={ - "model": model, - "training_file": training_file, - "hyperparameters": hyperparameters, - "suffix": suffix, - "validation_file": validation_file, - "request_id": request_id, - }, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) - - def retrieve( - self, - fine_tuning_job_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - return self._get( - f"/fine_tuning/jobs/{fine_tuning_job_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) - - def list( - self, - *, - after: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ListOfFineTuningJob: - return self._get( - "/fine_tuning/jobs", - cast_type=ListOfFineTuningJob, - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query={ - "after": after, - "limit": limit, - }, - ), - ) - - def cancel( - self, - fine_tuning_job_id: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # noqa: E501 - # The extra values given here take precedence over values defined on the client or passed to this method. 
- extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - if not fine_tuning_job_id: - raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") - return self._post( - f"/fine_tuning/jobs/{fine_tuning_job_id}/cancel", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) - - def list_events( - self, - fine_tuning_job_id: str, - *, - after: str | NotGiven = NOT_GIVEN, - limit: int | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJobEvent: - return self._get( - f"/fine_tuning/jobs/{fine_tuning_job_id}/events", - cast_type=FineTuningJobEvent, - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query={ - "after": after, - "limit": limit, - }, - ), - ) - - def delete( - self, - fine_tuning_job_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTuningJob: - if not fine_tuning_job_id: - raise ValueError(f"Expected a non-empty value for `fine_tuning_job_id` but received {fine_tuning_job_id!r}") - return self._delete( - f"/fine_tuning/jobs/{fine_tuning_job_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=FineTuningJob, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/__init__.py deleted file mode 100644 index d832635baf..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .fine_tuned_models import FineTunedModels - -__all__ = ["FineTunedModels"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/fine_tuned_models.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/fine_tuned_models.py deleted file mode 100644 index 29c023e3b1..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/fine_tuning/models/fine_tuned_models.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -import httpx - -from ....core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - make_request_options, -) -from ....types.fine_tuning.models import FineTunedModelsStatus - -if TYPE_CHECKING: - from ...._client import ZhipuAI - -__all__ = ["FineTunedModels"] - - -class FineTunedModels(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def delete( - self, - fine_tuned_model: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> FineTunedModelsStatus: - if not fine_tuned_model: - raise ValueError(f"Expected a non-empty value for `fine_tuned_model` but received {fine_tuned_model!r}") - return self._delete( - f"fine_tuning/fine_tuned_models/{fine_tuned_model}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, 
timeout=timeout), - cast_type=FineTunedModelsStatus, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py deleted file mode 100644 index 8ad411913f..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py +++ /dev/null @@ -1,59 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ..core import NOT_GIVEN, BaseAPI, Body, Headers, NotGiven, make_request_options -from ..types.image import ImagesResponded -from ..types.sensitive_word_check import SensitiveWordCheckRequest - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class Images(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def generations( - self, - *, - prompt: str, - model: str | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - quality: Optional[str] | NotGiven = NOT_GIVEN, - response_format: Optional[str] | NotGiven = NOT_GIVEN, - size: Optional[str] | NotGiven = NOT_GIVEN, - style: Optional[str] | NotGiven = NOT_GIVEN, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - user: str | NotGiven = NOT_GIVEN, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - user_id: Optional[str] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - disable_strict_validation: Optional[bool] | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ImagesResponded: - _cast_type = ImagesResponded - if disable_strict_validation: - _cast_type = object - return self._post( - "/images/generations", - body={ - "prompt": prompt, - "model": model, - "n": n, - "quality": quality, - "response_format": response_format, - "sensitive_word_check": sensitive_word_check, - "size": size, - "style": style, - "user": user, - "user_id": user_id, - "request_id": request_id, - }, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=_cast_type, - stream=False, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/__init__.py deleted file mode 100644 index 5a67d743c3..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .knowledge import Knowledge - -__all__ = ["Knowledge"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/__init__.py deleted file mode 100644 index fd289e2232..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .document import Document - -__all__ = ["Document"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py deleted file mode 100644 index 492c49da66..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py +++ /dev/null @@ -1,217 +0,0 @@ -from __future__ import annotations - 
-from collections.abc import Mapping -from typing import TYPE_CHECKING, Literal, Optional, cast - -import httpx - -from ....core import ( - NOT_GIVEN, - BaseAPI, - Body, - FileTypes, - Headers, - NotGiven, - deepcopy_minimal, - extract_files, - make_request_options, - maybe_transform, -) -from ....types.files import UploadDetail, file_create_params -from ....types.knowledge.document import DocumentData, DocumentObject, document_edit_params, document_list_params -from ....types.knowledge.document.document_list_resp import DocumentPage - -if TYPE_CHECKING: - from ...._client import ZhipuAI - -__all__ = ["Document"] - - -class Document(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def create( - self, - *, - file: Optional[FileTypes] = None, - custom_separator: Optional[list[str]] = None, - upload_detail: Optional[list[UploadDetail]] = None, - purpose: Literal["retrieval"], - knowledge_id: Optional[str] = None, - sentence_size: Optional[int] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> DocumentObject: - if not file and not upload_detail: - raise ValueError("At least one of `file` and `upload_detail` must be provided.") - body = deepcopy_minimal( - { - "file": file, - "upload_detail": upload_detail, - "purpose": purpose, - "custom_separator": custom_separator, - "knowledge_id": knowledge_id, - "sentence_size": sentence_size, - } - ) - files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) - if files: - # It should be noted that the actual Content-Type header that will be - # sent to the server will contain a `boundary` parameter, e.g. - # multipart/form-data; boundary=---abc-- - extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} - return self._post( - "/files", - body=maybe_transform(body, file_create_params.FileCreateParams), - files=files, - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=DocumentObject, - ) - - def edit( - self, - document_id: str, - knowledge_type: str, - *, - custom_separator: Optional[list[str]] = None, - sentence_size: Optional[int] = None, - callback_url: Optional[str] = None, - callback_header: Optional[dict[str, str]] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> httpx.Response: - """ - - Args: - document_id: knowledge document ID - knowledge_type: knowledge type: - 1: article knowledge: supports pdf, url, docx - 2. Q&A knowledge - document: supports pdf, url, docx - 3. Q&A knowledge - spreadsheet: supports xlsx - 4. product catalog - spreadsheet: supports xlsx - 5. custom: supports pdf, url, docx - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not document_id: - raise ValueError(f"Expected a non-empty value for `document_id` but received {document_id!r}") - - body = deepcopy_minimal( - { - "id": document_id, - "knowledge_type": knowledge_type, - "custom_separator": custom_separator, - "sentence_size": sentence_size, - "callback_url": callback_url, - "callback_header": callback_header, - } - ) - - return self._put( - f"/document/{document_id}", - body=maybe_transform(body,
document_edit_params.DocumentEditParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def list( - self, - knowledge_id: str, - *, - purpose: str | NotGiven = NOT_GIVEN, - page: str | NotGiven = NOT_GIVEN, - limit: str | NotGiven = NOT_GIVEN, - order: Literal["desc", "asc"] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> DocumentPage: - return self._get( - "/files", - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "knowledge_id": knowledge_id, - "purpose": purpose, - "page": page, - "limit": limit, - "order": order, - }, - document_list_params.DocumentListParams, - ), - ), - cast_type=DocumentPage, - ) - - def delete( - self, - document_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> httpx.Response: - """ - Delete a knowledge document. - - Args: - - document_id: knowledge document ID - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not document_id: - raise ValueError(f"Expected a non-empty value for `document_id` but received {document_id!r}") - - return self._delete( - f"/document/{document_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def retrieve( - self, - document_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> DocumentData: - """ - - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not document_id: - raise ValueError(f"Expected a non-empty value for `document_id` but received {document_id!r}") - - return self._get( - f"/document/{document_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=DocumentData, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/knowledge.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/knowledge.py deleted file mode 100644 index fea4c73ac9..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/knowledge.py +++ /dev/null @@ -1,173 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Literal, Optional - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - cached_property, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.knowledge import KnowledgeInfo, KnowledgeUsed, knowledge_create_params, knowledge_list_params -from ...types.knowledge.knowledge_list_resp import KnowledgePage -from .document import Document - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -__all__ = ["Knowledge"] - - -class Knowledge(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - @cached_property - def document(self) -> Document: - return Document(self._client) - - def create(
self, - embedding_id: int, - name: str, - *, - customer_identifier: Optional[str] = None, - description: Optional[str] = None, - background: Optional[Literal["blue", "red", "orange", "purple", "sky"]] = None, - icon: Optional[Literal["question", "book", "seal", "wrench", "tag", "horn", "house"]] = None, - bucket_id: Optional[str] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> KnowledgeInfo: - body = deepcopy_minimal( - { - "embedding_id": embedding_id, - "name": name, - "customer_identifier": customer_identifier, - "description": description, - "background": background, - "icon": icon, - "bucket_id": bucket_id, - } - ) - return self._post( - "/knowledge", - body=maybe_transform(body, knowledge_create_params.KnowledgeBaseParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=KnowledgeInfo, - ) - - def modify( - self, - knowledge_id: str, - embedding_id: int, - *, - name: str, - description: Optional[str] = None, - background: Optional[Literal["blue", "red", "orange", "purple", "sky"]] = None, - icon: Optional[Literal["question", "book", "seal", "wrench", "tag", "horn", "house"]] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> httpx.Response: - body = deepcopy_minimal( - { - "id": knowledge_id, - "embedding_id": embedding_id, - "name": name, - "description": description, - "background": background, - "icon": icon, - } - ) - return self._put( - f"/knowledge/{knowledge_id}", - body=maybe_transform(body, knowledge_create_params.KnowledgeBaseParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def query( - self, - *, - page: int | NotGiven = 1, - size: int | NotGiven = 10, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> KnowledgePage: - return self._get( - "/knowledge", - options=make_request_options( - extra_headers=extra_headers, - extra_body=extra_body, - timeout=timeout, - query=maybe_transform( - { - "page": page, - "size": size, - }, - knowledge_list_params.KnowledgeListParams, - ), - ), - cast_type=KnowledgePage, - ) - - def delete( - self, - knowledge_id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> httpx.Response: - """ - Delete a knowledge base. - - Args: - knowledge_id: knowledge base ID - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not knowledge_id: - raise ValueError("Expected a non-empty value for `knowledge_id`") - - return self._delete( - f"/knowledge/{knowledge_id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=httpx.Response, - ) - - def used( - self, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> KnowledgeUsed: - """ - Returns the used capacity of the knowledge base.
- - Args: - extra_headers: Send extra headers - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._get( - "/knowledge/capacity", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=KnowledgeUsed, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/__init__.py deleted file mode 100644 index 43e4e37da1..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .tools import Tools - -__all__ = ["Tools"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/tools.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/tools.py deleted file mode 100644 index 3c3a630aff..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/tools/tools.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Literal, Optional, Union - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - StreamResponse, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.tools import WebSearch, WebSearchChunk, tools_web_search_params - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -__all__ = ["Tools"] - - -class Tools(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def web_search( - self, - *, - model: str, - request_id: Optional[str] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - messages: Union[str, list[str], list[int], object, None], - scope: Optional[str] | NotGiven = NOT_GIVEN, - location: Optional[str] | NotGiven = NOT_GIVEN, - recent_days: Optional[int] | NotGiven = NOT_GIVEN, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> WebSearch | StreamResponse[WebSearchChunk]: - body = deepcopy_minimal( - { - "model": model, - "request_id": request_id, - "messages": messages, - "stream": stream, - "scope": scope, - "location": location, - "recent_days": recent_days, - } - ) - return self._post( - "/tools", - body=maybe_transform(body, tools_web_search_params.WebSearchParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=WebSearch, - stream=stream or False, - stream_cls=StreamResponse[WebSearchChunk], - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/__init__.py deleted file mode 100644 index 6b0f99ed09..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from .videos import ( - Videos, -) - -__all__ = [ - "Videos", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py deleted file mode 100644 index 71c8316602..0000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py +++ /dev/null @@ -1,77 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Optional - -import httpx - -from ...core import ( - NOT_GIVEN, - BaseAPI, - Body, - Headers, - NotGiven, - deepcopy_minimal, - make_request_options, - maybe_transform, -) -from ...types.sensitive_word_check import SensitiveWordCheckRequest -from ...types.video import VideoObject, video_create_params - -if TYPE_CHECKING: - from ..._client import ZhipuAI - -__all__ = ["Videos"] - - -class Videos(BaseAPI): - def __init__(self, client: ZhipuAI) -> None: - super().__init__(client) - - def generations( - self, - model: str, - *, - prompt: Optional[str] = None, - image_url: Optional[str] = None, - sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - request_id: Optional[str] = None, - user_id: Optional[str] = None, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> VideoObject: - if not model and not prompt: - raise ValueError("At least one of `model` and `prompt` must be provided.") - body = deepcopy_minimal( - { - "model": model, - "prompt": prompt, - "image_url": image_url, - "sensitive_word_check": sensitive_word_check, - "request_id": request_id, - "user_id": user_id, - } - ) - return self._post( - "/videos/generations", - body=maybe_transform(body, video_create_params.VideoCreateParams), - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=VideoObject, - ) - - def retrieve_videos_result( - self, - id: str, - *, - extra_headers: Headers | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> VideoObject: - if not id: - raise ValueError("Expected a non-empty value for `id`") - - return self._get( - f"/async-result/{id}", - options=make_request_options(extra_headers=extra_headers, extra_body=extra_body, timeout=timeout), - cast_type=VideoObject, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/__init__.py deleted file mode 100644 index 3d6466d279..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/__init__.py +++ /dev/null @@ -1,108 +0,0 @@ -from ._base_api import BaseAPI -from ._base_compat import ( - PYDANTIC_V2, - ConfigDict, - GenericModel, - cached_property, - field_get_default, - get_args, - get_model_config, - get_model_fields, - get_origin, - is_literal_type, - is_union, - parse_obj, -) -from ._base_models import BaseModel, construct_type -from ._base_type import ( - NOT_GIVEN, - Body, - FileTypes, - Headers, - IncEx, - ModelT, - NotGiven, - Query, -) -from ._constants import ( - ZHIPUAI_DEFAULT_LIMITS, - ZHIPUAI_DEFAULT_MAX_RETRIES, - ZHIPUAI_DEFAULT_TIMEOUT, -) -from ._errors import ( - APIAuthenticationError, - APIConnectionError, - APIInternalError, - APIReachLimitError, - APIRequestFailedError, - APIResponseError, - APIResponseValidationError, - APIServerFlowExceedError, - APIStatusError, - APITimeoutError, - ZhipuAIError, -) -from ._files import is_file_content -from ._http_client import HttpClient, make_request_options -from ._sse_client import StreamResponse -from ._utils import ( - deepcopy_minimal, - drop_prefix_image_data, - extract_files, - is_given, - is_list, - is_mapping, - maybe_transform, -
parse_date, - parse_datetime, -) - -__all__ = [ - "BaseModel", - "construct_type", - "BaseAPI", - "NOT_GIVEN", - "Headers", - "NotGiven", - "Body", - "IncEx", - "ModelT", - "Query", - "FileTypes", - "PYDANTIC_V2", - "ConfigDict", - "GenericModel", - "get_args", - "is_union", - "parse_obj", - "get_origin", - "is_literal_type", - "get_model_config", - "get_model_fields", - "field_get_default", - "is_file_content", - "ZhipuAIError", - "APIStatusError", - "APIRequestFailedError", - "APIAuthenticationError", - "APIReachLimitError", - "APIInternalError", - "APIServerFlowExceedError", - "APIResponseError", - "APIResponseValidationError", - "APITimeoutError", - "make_request_options", - "HttpClient", - "ZHIPUAI_DEFAULT_TIMEOUT", - "ZHIPUAI_DEFAULT_MAX_RETRIES", - "ZHIPUAI_DEFAULT_LIMITS", - "is_list", - "is_mapping", - "parse_date", - "parse_datetime", - "is_given", - "maybe_transform", - "deepcopy_minimal", - "extract_files", - "StreamResponse", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_api.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_api.py deleted file mode 100644 index 3592ea6bac..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_api.py +++ /dev/null @@ -1,19 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from .._client import ZhipuAI - - -class BaseAPI: - _client: ZhipuAI - - def __init__(self, client: ZhipuAI) -> None: - self._client = client - self._delete = client.delete - self._get = client.get - self._post = client.post - self._put = client.put - self._patch = client.patch - self._get_api_list = client.get_api_list diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_compat.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_compat.py deleted file mode 100644 index 92a5d683be..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_compat.py +++ /dev/null @@ -1,209 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable -from datetime import date, datetime -from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union, cast, overload - -import pydantic -from pydantic.fields import FieldInfo -from typing_extensions import Self - -from ._base_type import StrBytesIntFloat - -_T = TypeVar("_T") -_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) - -# --------------- Pydantic v2 compatibility --------------- - -# Pyright incorrectly reports some of our functions as overriding a method when they don't -# pyright: reportIncompatibleMethodOverride=false - -PYDANTIC_V2 = pydantic.VERSION.startswith("2.") - -# v1 re-exports -if TYPE_CHECKING: - - def parse_date(value: date | StrBytesIntFloat) -> date: ... - - def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: ... - - def get_args(t: type[Any]) -> tuple[Any, ...]: ... - - def is_union(tp: type[Any] | None) -> bool: ... - - def get_origin(t: type[Any]) -> type[Any] | None: ... - - def is_literal_type(type_: type[Any]) -> bool: ... - - def is_typeddict(type_: type[Any]) -> bool: ... 
- -else: - if PYDANTIC_V2: - from pydantic.v1.typing import ( # noqa: I001 - get_args as get_args, # noqa: PLC0414 - is_union as is_union, # noqa: PLC0414 - get_origin as get_origin, # noqa: PLC0414 - is_typeddict as is_typeddict, # noqa: PLC0414 - is_literal_type as is_literal_type, # noqa: PLC0414 - ) - from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime # noqa: PLC0414 - else: - from pydantic.typing import ( # noqa: I001 - get_args as get_args, # noqa: PLC0414 - is_union as is_union, # noqa: PLC0414 - get_origin as get_origin, # noqa: PLC0414 - is_typeddict as is_typeddict, # noqa: PLC0414 - is_literal_type as is_literal_type, # noqa: PLC0414 - ) - from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime # noqa: PLC0414 - - -# refactored config -if TYPE_CHECKING: - from pydantic import ConfigDict -else: - if PYDANTIC_V2: - from pydantic import ConfigDict - else: - # TODO: provide an error message here? - ConfigDict = None - - -# renamed methods / properties -def parse_obj(model: type[_ModelT], value: object) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(value) - else: - # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - return cast(_ModelT, model.parse_obj(value)) - - -def field_is_required(field: FieldInfo) -> bool: - if PYDANTIC_V2: - return field.is_required() - return field.required # type: ignore - - -def field_get_default(field: FieldInfo) -> Any: - value = field.get_default() - if PYDANTIC_V2: - from pydantic_core import PydanticUndefined - - if value == PydanticUndefined: - return None - return value - return value - - -def field_outer_type(field: FieldInfo) -> Any: - if PYDANTIC_V2: - return field.annotation - return field.outer_type_ # type: ignore - - -def get_model_config(model: type[pydantic.BaseModel]) -> Any: - if PYDANTIC_V2: - return model.model_config - return model.__config__ # type: ignore - - -def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: - if PYDANTIC_V2: - return model.model_fields - return model.__fields__ # type: ignore - - -def model_copy(model: _ModelT) -> _ModelT: - if PYDANTIC_V2: - return model.model_copy() - return model.copy() # type: ignore - - -def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: - if PYDANTIC_V2: - return model.model_dump_json(indent=indent) - return model.json(indent=indent) # type: ignore - - -def model_dump( - model: pydantic.BaseModel, - *, - exclude_unset: bool = False, - exclude_defaults: bool = False, -) -> dict[str, Any]: - if PYDANTIC_V2: - return model.model_dump( - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - ) - return cast( - "dict[str, Any]", - model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - ), - ) - - -def model_parse(model: type[_ModelT], data: Any) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(data) - return model.parse_obj(data) # pyright: ignore[reportDeprecated] - - -# generic models -if TYPE_CHECKING: - - class GenericModel(pydantic.BaseModel): ... - -else: - if PYDANTIC_V2: - # there no longer needs to be a distinction in v2 but - # we still have to create our own subclass to avoid - # inconsistent MRO ordering errors - class GenericModel(pydantic.BaseModel): ... - - else: - import pydantic.generics - - class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... 
- - -# cached properties -if TYPE_CHECKING: - cached_property = property - - # we define a separate type (copied from typeshed) - # that represents that `cached_property` is `set`able - # at runtime, which differs from `@property`. - # - # this is a separate type as editors likely special case - # `@property` and we don't want to cause issues just to have - # more helpful internal types. - - class typed_cached_property(Generic[_T]): # noqa: N801 - func: Callable[[Any], _T] - attrname: str | None - - def __init__(self, func: Callable[[Any], _T]) -> None: ... - - @overload - def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... - - @overload - def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... - - def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: - raise NotImplementedError() - - def __set_name__(self, owner: type[Any], name: str) -> None: ... - - # __set__ is not defined at runtime, but @cached_property is designed to be settable - def __set__(self, instance: object, value: _T) -> None: ... -else: - try: - from functools import cached_property - except ImportError: - from cached_property import cached_property - - typed_cached_property = cached_property diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py deleted file mode 100644 index 69b1d3a83d..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py +++ /dev/null @@ -1,670 +0,0 @@ -from __future__ import annotations - -import inspect -import os -from collections.abc import Callable -from datetime import date, datetime -from typing import TYPE_CHECKING, Any, ClassVar, Generic, Literal, TypeGuard, TypeVar, cast - -import pydantic -import pydantic.generics -from pydantic.fields import FieldInfo -from typing_extensions import ( - ParamSpec, - Protocol, - override, - runtime_checkable, -) - -from ._base_compat import ( - PYDANTIC_V2, - ConfigDict, - field_get_default, - get_args, - get_model_config, - get_model_fields, - get_origin, - is_literal_type, - is_union, - parse_obj, -) -from ._base_compat import ( - GenericModel as BaseGenericModel, -) -from ._base_type import ( - IncEx, - ModelT, -) -from ._utils import ( - PropertyInfo, - coerce_boolean, - extract_type_arg, - is_annotated_type, - is_list, - is_mapping, - parse_date, - parse_datetime, - strip_annotated_type, -) - -if TYPE_CHECKING: - from pydantic_core.core_schema import ModelField - -__all__ = ["BaseModel", "GenericModel"] -_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") - -_T = TypeVar("_T") -P = ParamSpec("P") - - -@runtime_checkable -class _ConfigProtocol(Protocol): - allow_population_by_field_name: bool - - -class BaseModel(pydantic.BaseModel): - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict( - extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) - ) - else: - - @property - @override - def model_fields_set(self) -> set[str]: - # a forwards-compat shim for pydantic v2 - return self.__fields_set__ # type: ignore - - class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] - extra: Any = pydantic.Extra.allow # type: ignore - - def to_dict( - self, - *, - mode: Literal["json", "python"] = "python", - use_api_names: bool = True, - exclude_unset: bool = True, - exclude_defaults: bool = False, - exclude_none: bool = False, - warnings: bool = True, - ) -> 
dict[str, object]: - """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude. - - By default, fields that were not set by the API will not be included, - and keys will match the API response, *not* the property names from the model. - - For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, - the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). - - Args: - mode: - If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`. - If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)` - - use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that are set to their default value from the output. - exclude_none: Whether to exclude fields that have a value of `None` from the output. - warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2. - """ # noqa: E501 - return self.model_dump( - mode=mode, - by_alias=use_api_names, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - warnings=warnings, - ) - - def to_json( - self, - *, - indent: int | None = 2, - use_api_names: bool = True, - exclude_unset: bool = True, - exclude_defaults: bool = False, - exclude_none: bool = False, - warnings: bool = True, - ) -> str: - """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation). - - By default, fields that were not set by the API will not be included, - and keys will match the API response, *not* the property names from the model. - - For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, - the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). - - Args: - indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2` - use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that have the default value. - exclude_none: Whether to exclude fields that have a value of `None`. - warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2. - """ # noqa: E501 - return self.model_dump_json( - indent=indent, - by_alias=use_api_names, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - warnings=warnings, - ) - - @override - def __str__(self) -> str: - # mypy complains about an invalid self arg - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' # type: ignore[misc] - - # Override the 'construct' method in a way that supports recursive parsing without validation. - # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. 
- @classmethod - @override - def construct( - cls: type[ModelT], - _fields_set: set[str] | None = None, - **values: object, - ) -> ModelT: - m = cls.__new__(cls) - fields_values: dict[str, object] = {} - - config = get_model_config(cls) - populate_by_name = ( - config.allow_population_by_field_name - if isinstance(config, _ConfigProtocol) - else config.get("populate_by_name") - ) - - if _fields_set is None: - _fields_set = set() - - model_fields = get_model_fields(cls) - for name, field in model_fields.items(): - key = field.alias - if key is None or (key not in values and populate_by_name): - key = name - - if key in values: - fields_values[name] = _construct_field(value=values[key], field=field, key=key) - _fields_set.add(name) - else: - fields_values[name] = field_get_default(field) - - _extra = {} - for key, value in values.items(): - if key not in model_fields: - if PYDANTIC_V2: - _extra[key] = value - else: - _fields_set.add(key) - fields_values[key] = value - - object.__setattr__(m, "__dict__", fields_values) # noqa: PLC2801 - - if PYDANTIC_V2: - # these properties are copied from Pydantic's `model_construct()` method - object.__setattr__(m, "__pydantic_private__", None) # noqa: PLC2801 - object.__setattr__(m, "__pydantic_extra__", _extra) # noqa: PLC2801 - object.__setattr__(m, "__pydantic_fields_set__", _fields_set) # noqa: PLC2801 - else: - # init_private_attributes() does not exist in v2 - m._init_private_attributes() # type: ignore - - # copied from Pydantic v1's `construct()` method - object.__setattr__(m, "__fields_set__", _fields_set) # noqa: PLC2801 - - return m - - if not TYPE_CHECKING: - # type checkers incorrectly complain about this assignment - # because the type signatures are technically different - # although not in practice - model_construct = construct - - if not PYDANTIC_V2: - # we define aliases for some of the new pydantic v2 methods so - # that we can just document these methods without having to specify - # a specific pydantic version as some users may not know which - # pydantic version they are currently using - - @override - def model_dump( - self, - *, - mode: Literal["json", "python"] | str = "python", - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, - serialize_as_any: bool = False, - ) -> dict[str, Any]: - """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump - - Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. - - Args: - mode: The mode in which `to_python` should run. - If mode is 'json', the dictionary will only contain JSON serializable types. - If mode is 'python', the dictionary may contain any Python objects. - include: A list of fields to include in the output. - exclude: A list of fields to exclude from the output. - by_alias: Whether to use the field's alias in the dictionary key if defined. - exclude_unset: Whether to exclude fields that are unset or None from the output. - exclude_defaults: Whether to exclude fields that are set to their default value from the output. - exclude_none: Whether to exclude fields that have a value of `None` from the output. - round_trip: Whether to enable serialization and deserialization round-trip support. 
- warnings: Whether to log warnings when invalid fields are encountered. - - Returns: - A dictionary representation of the model. - """ - if mode != "python": - raise ValueError("mode is only supported in Pydantic v2") - if round_trip != False: - raise ValueError("round_trip is only supported in Pydantic v2") - if warnings != True: - raise ValueError("warnings is only supported in Pydantic v2") - if context is not None: - raise ValueError("context is only supported in Pydantic v2") - if serialize_as_any != False: - raise ValueError("serialize_as_any is only supported in Pydantic v2") - return super().dict( # pyright: ignore[reportDeprecated] - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - @override - def model_dump_json( - self, - *, - indent: int | None = None, - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, - serialize_as_any: bool = False, - ) -> str: - """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json - - Generates a JSON representation of the model using Pydantic's `to_json` method. - - Args: - indent: Indentation to use in the JSON output. If None is passed, the output will be compact. - include: Field(s) to include in the JSON output. Can take either a string or set of strings. - exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings. - by_alias: Whether to serialize using field aliases. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that have the default value. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: Whether to use serialization/deserialization between JSON and class instance. - warnings: Whether to show any warnings that occurred during serialization. - - Returns: - A JSON string representation of the model. 
- """ - if round_trip != False: - raise ValueError("round_trip is only supported in Pydantic v2") - if warnings != True: - raise ValueError("warnings is only supported in Pydantic v2") - if context is not None: - raise ValueError("context is only supported in Pydantic v2") - if serialize_as_any != False: - raise ValueError("serialize_as_any is only supported in Pydantic v2") - return super().json( # type: ignore[reportDeprecated] - indent=indent, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - -def _construct_field(value: object, field: FieldInfo, key: str) -> object: - if value is None: - return field_get_default(field) - - if PYDANTIC_V2: - type_ = field.annotation - else: - type_ = cast(type, field.outer_type_) # type: ignore - - if type_ is None: - raise RuntimeError(f"Unexpected field type is None for {key}") - - return construct_type(value=value, type_=type_) - - -def is_basemodel(type_: type) -> bool: - """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" - if is_union(type_): - return any(is_basemodel(variant) for variant in get_args(type_)) - - return is_basemodel_type(type_) - - -def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: - origin = get_origin(type_) or type_ - return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) - - -def build( - base_model_cls: Callable[P, _BaseModelT], - *args: P.args, - **kwargs: P.kwargs, -) -> _BaseModelT: - """Construct a BaseModel class without validation. - - This is useful for cases where you need to instantiate a `BaseModel` - from an API response as this provides type-safe params which isn't supported - by helpers like `construct_type()`. - - ```py - build(MyModel, my_field_a="foo", my_field_b=123) - ``` - """ - if args: - raise TypeError( - "Received positional arguments which are not supported; Keyword arguments must be used instead", - ) - - return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) - - -def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: - """Loose coercion to the expected type with construction of nested values. - - Note: the returned value from this function is not guaranteed to match the - given type. - """ - return cast(_T, construct_type(value=value, type_=type_)) - - -def construct_type(*, value: object, type_: type) -> object: - """Loose coercion to the expected type with construction of nested values. - - If the given value does not match the expected type then it is returned as-is. - """ - # we allow `object` as the input type because otherwise, passing things like - # `Literal['value']` will be reported as a type error by type checkers - type_ = cast("type[object]", type_) - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(type_): - meta: tuple[Any, ...] = get_args(type_)[1:] - type_ = extract_type_arg(type_, 0) - else: - meta = () - # we need to use the origin class for any types that are subscripted generics - # e.g. Dict[str, object] - origin = get_origin(type_) or type_ - args = get_args(type_) - - if is_union(origin): - try: - return validate_type(type_=cast("type[object]", type_), value=value) - except Exception: - pass - - # if the type is a discriminated union then we want to construct the right variant - # in the union, even if the data doesn't match exactly, otherwise we'd break code - # that relies on the constructed class types, e.g. 
- # - # class FooType: - # kind: Literal['foo'] - # value: str - # - # class BarType: - # kind: Literal['bar'] - # value: int - # - # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then - # we'd end up constructing `FooType` when it should be `BarType`. - discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta) - if discriminator and is_mapping(value): - variant_value = value.get(discriminator.field_alias_from or discriminator.field_name) - if variant_value and isinstance(variant_value, str): - variant_type = discriminator.mapping.get(variant_value) - if variant_type: - return construct_type(type_=variant_type, value=value) - - # if the data is not valid, use the first variant that doesn't fail while deserializing - for variant in args: - try: - return construct_type(value=value, type_=variant) - except Exception: - continue - - raise RuntimeError(f"Could not convert data into a valid instance of {type_}") - if origin == dict: - if not is_mapping(value): - return value - - _, items_type = get_args(type_) # Dict[_, items_type] - return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} - - if not is_literal_type(type_) and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)): - if is_list(value): - return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] - - if is_mapping(value): - if issubclass(type_, BaseModel): - return type_.construct(**value) # type: ignore[arg-type] - - return cast(Any, type_).construct(**value) - - if origin == list: - if not is_list(value): - return value - - inner_type = args[0] # List[inner_type] - return [construct_type(value=entry, type_=inner_type) for entry in value] - - if origin == float: - if isinstance(value, int): - coerced = float(value) - if coerced != value: - return value - return coerced - - return value - - if type_ == datetime: - try: - return parse_datetime(value) # type: ignore - except Exception: - return value - - if type_ == date: - try: - return parse_date(value) # type: ignore - except Exception: - return value - - return value - - -@runtime_checkable -class CachedDiscriminatorType(Protocol): - __discriminator__: DiscriminatorDetails - - -class DiscriminatorDetails: - field_name: str - """The name of the discriminator field in the variant class, e.g. - - ```py - class Foo(BaseModel): - type: Literal['foo'] - ``` - - Will result in field_name='type' - """ - - field_alias_from: str | None - """The name of the discriminator field in the API response, e.g. - - ```py - class Foo(BaseModel): - type: Literal['foo'] = Field(alias='type_from_api') - ``` - - Will result in field_alias_from='type_from_api' - """ - - mapping: dict[str, type] - """Mapping of discriminator value to variant type, e.g. 
- - {'foo': FooVariant, 'bar': BarVariant} - """ - - def __init__( - self, - *, - mapping: dict[str, type], - discriminator_field: str, - discriminator_alias: str | None, - ) -> None: - self.mapping = mapping - self.field_name = discriminator_field - self.field_alias_from = discriminator_alias - - -def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: - if isinstance(union, CachedDiscriminatorType): - return union.__discriminator__ - - discriminator_field_name: str | None = None - - for annotation in meta_annotations: - if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None: - discriminator_field_name = annotation.discriminator - break - - if not discriminator_field_name: - return None - - mapping: dict[str, type] = {} - discriminator_alias: str | None = None - - for variant in get_args(union): - variant = strip_annotated_type(variant) - if is_basemodel_type(variant): - if PYDANTIC_V2: - field = _extract_field_schema_pv2(variant, discriminator_field_name) - if not field: - continue - - # Note: if one variant defines an alias then they all should - discriminator_alias = field.get("serialization_alias") - - field_schema = field["schema"] - - if field_schema["type"] == "literal": - for entry in cast("LiteralSchema", field_schema)["expected"]: - if isinstance(entry, str): - mapping[entry] = variant - else: - field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - if not field_info: - continue - - # Note: if one variant defines an alias then they all should - discriminator_alias = field_info.alias - - if field_info.annotation and is_literal_type(field_info.annotation): - for entry in get_args(field_info.annotation): - if isinstance(entry, str): - mapping[entry] = variant - - if not mapping: - return None - - details = DiscriminatorDetails( - mapping=mapping, - discriminator_field=discriminator_field_name, - discriminator_alias=discriminator_alias, - ) - cast(CachedDiscriminatorType, union).__discriminator__ = details - return details - - -def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None: - schema = model.__pydantic_core_schema__ - if schema["type"] != "model": - return None - - fields_schema = schema["schema"] - if fields_schema["type"] != "model-fields": - return None - - fields_schema = cast("ModelFieldsSchema", fields_schema) - - field = fields_schema["fields"].get(field_name) - if not field: - return None - - return cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast] - - -def validate_type(*, type_: type[_T], value: object) -> _T: - """Strict validation that the given value matches the expected type""" - if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): - return cast(_T, parse_obj(type_, value)) - - return cast(_T, _validate_non_model_type(type_=type_, value=value)) - - -# Subclassing here confuses type checkers, so we treat this class as non-inheriting. -if TYPE_CHECKING: - GenericModel = BaseModel -else: - - class GenericModel(BaseGenericModel, BaseModel): - pass - - -if PYDANTIC_V2: - from pydantic import TypeAdapter - - def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: - return TypeAdapter(type_).validate_python(value) - -elif not TYPE_CHECKING: - - class TypeAdapter(Generic[_T]): - """Used as a placeholder to easily convert runtime types to a Pydantic format - to provide validation. 
- - For example: - ```py - validated = RootModel[int](__root__="5").__root__ - # validated: 5 - ``` - """ - - def __init__(self, type_: type[_T]): - self.type_ = type_ - - def validate_python(self, value: Any) -> _T: - if not isinstance(value, self.type_): - raise ValueError(f"Invalid type: {value} is not of type {self.type_}") - return value - - def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: - return TypeAdapter(type_).validate_python(value) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_type.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_type.py deleted file mode 100644 index ea1d3f09dc..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_type.py +++ /dev/null @@ -1,170 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping, Sequence -from os import PathLike -from typing import ( - IO, - TYPE_CHECKING, - Any, - Literal, - Optional, - TypeAlias, - TypeVar, - Union, -) - -import pydantic -from httpx import Response -from typing_extensions import Protocol, TypedDict, override, runtime_checkable - -Query = Mapping[str, object] -Body = object -AnyMapping = Mapping[str, object] -PrimitiveData = Union[str, int, float, bool, None] -Data = Union[PrimitiveData, list[Any], tuple[Any], "Mapping[str, Any]"] -ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) -_T = TypeVar("_T") - -if TYPE_CHECKING: - NoneType: type[None] -else: - NoneType = type(None) - - -# Sentinel class used until PEP 0661 is accepted -class NotGiven: - """ - A sentinel singleton class used to distinguish omitted keyword arguments - from those passed in with the value None (which may have different behavior). - - For example: - - ```py - def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ... - - get(timeout=1) # 1s timeout - get(timeout=None) # No timeout - get() # Default timeout behavior, which may not be statically known at the method definition. - ``` - """ - - def __bool__(self) -> Literal[False]: - return False - - @override - def __repr__(self) -> str: - return "NOT_GIVEN" - - -NotGivenOr = Union[_T, NotGiven] -NOT_GIVEN = NotGiven() - - -class Omit: - """In certain situations you need to be able to represent a case where a default value has - to be explicitly removed and `None` is not an appropriate substitute, for example: - - ```py - # as the default `Content-Type` header is `application/json` that will be sent - client.post('/upload/files', files={'file': b'my raw file content'}) - - # you can't explicitly override the header as it has to be dynamically generated - # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' - client.post(..., headers={'Content-Type': 'multipart/form-data'}) - - # instead you can remove the default `application/json` header by passing Omit - client.post(..., headers={'Content-Type': Omit()}) - ``` - """ - - def __bool__(self) -> Literal[False]: - return False - - -@runtime_checkable -class ModelBuilderProtocol(Protocol): - @classmethod - def build( - cls: type[_T], - *, - response: Response, - data: object, - ) -> _T: ... - - -Headers = Mapping[str, Union[str, Omit]] - - -class HeadersLikeProtocol(Protocol): - def get(self, __key: str) -> str | None: ... 
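The `NotGiven` docstring above distinguishes three caller intents that `None` alone cannot encode. A self-contained sketch of the sentinel pattern, with names copied from this file and the logic reduced to the essentials:

```python
from typing import Union


class NotGiven:
    """Sentinel marking an argument the caller did not pass at all."""

    def __bool__(self) -> bool:
        return False

    def __repr__(self) -> str:
        return "NOT_GIVEN"


NOT_GIVEN = NotGiven()


def get(timeout: Union[float, None, NotGiven] = NOT_GIVEN) -> str:
    if isinstance(timeout, NotGiven):
        return "default timeout"  # argument omitted entirely
    if timeout is None:
        return "no timeout"       # caller explicitly disabled the timeout
    return f"{timeout}s timeout"


print(get())      # default timeout
print(get(None))  # no timeout
print(get(1))     # 1s timeout
```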
-
-
-HeadersLike = Union[Headers, HeadersLikeProtocol]
-
-ResponseT = TypeVar(
-    "ResponseT",
-    bound="Union[str, None, BaseModel, list[Any], dict[str, Any], Response, UnknownResponse, ModelBuilderProtocol, BinaryResponseContent]",  # noqa: E501
-)
-
-StrBytesIntFloat = Union[str, bytes, int, float]
-
-# Note: copied from Pydantic
-# https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49
-IncEx: TypeAlias = "set[int] | set[str] | dict[int, Any] | dict[str, Any] | None"
-
-PostParser = Callable[[Any], Any]
-
-
-@runtime_checkable
-class InheritsGeneric(Protocol):
-    """Represents a type that has inherited from `Generic`
-
-    The `__orig_bases__` property can be used to determine the resolved
-    type variable for a given base class.
-    """
-
-    __orig_bases__: tuple[_GenericAlias]
-
-
-class _GenericAlias(Protocol):
-    __origin__: type[object]
-
-
-class HttpxSendArgs(TypedDict, total=False):
-    auth: httpx.Auth
-
-
-# for user input files
-if TYPE_CHECKING:
-    Base64FileInput = Union[IO[bytes], PathLike[str]]
-    FileContent = Union[IO[bytes], bytes, PathLike[str]]
-else:
-    Base64FileInput = Union[IO[bytes], PathLike]
-    FileContent = Union[IO[bytes], bytes, PathLike]
-
-FileTypes = Union[
-    # file (or bytes)
-    FileContent,
-    # (filename, file (or bytes))
-    tuple[Optional[str], FileContent],
-    # (filename, file (or bytes), content_type)
-    tuple[Optional[str], FileContent, Optional[str]],
-    # (filename, file (or bytes), content_type, headers)
-    tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
-]
-RequestFiles = Union[Mapping[str, FileTypes], Sequence[tuple[str, FileTypes]]]
-
-# duplicate of the above but without our custom file support
-HttpxFileContent = Union[bytes, IO[bytes]]
-HttpxFileTypes = Union[
-    # file (or bytes)
-    HttpxFileContent,
-    # (filename, file (or bytes))
-    tuple[Optional[str], HttpxFileContent],
-    # (filename, file (or bytes), content_type)
-    tuple[Optional[str], HttpxFileContent, Optional[str]],
-    # (filename, file (or bytes), content_type, headers)
-    tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]],
-]
-
-HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[tuple[str, HttpxFileTypes]]]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_constants.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_constants.py
deleted file mode 100644
index 8e43bdebec..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_constants.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import httpx
-
-RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response"
-# Use `Timeout` to control the `connect` and `read` timeouts; defaults to `timeout=300.0, connect=8.0`
-ZHIPUAI_DEFAULT_TIMEOUT = httpx.Timeout(timeout=300.0, connect=8.0)
-# Use the `retry` parameter to control the number of retries; defaults to 3
-ZHIPUAI_DEFAULT_MAX_RETRIES = 3
-# Use `Limits` to control the maximum number of connections and keep-alive connections; defaults to `max_connections=50, max_keepalive_connections=10`
-ZHIPUAI_DEFAULT_LIMITS = httpx.Limits(max_connections=50, max_keepalive_connections=10)
-
-INITIAL_RETRY_DELAY = 0.5
-MAX_RETRY_DELAY = 8.0
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_errors.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_errors.py
deleted file mode 100644
index e2c9d24c6c..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_errors.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from __future__ import annotations
-
-import httpx
-
-__all__ = [
-    "ZhipuAIError",
-    "APIStatusError",
-    "APIRequestFailedError",
-    
"APIAuthenticationError", - "APIReachLimitError", - "APIInternalError", - "APIServerFlowExceedError", - "APIResponseError", - "APIResponseValidationError", - "APITimeoutError", - "APIConnectionError", -] - - -class ZhipuAIError(Exception): - def __init__( - self, - message: str, - ) -> None: - super().__init__(message) - - -class APIStatusError(ZhipuAIError): - response: httpx.Response - status_code: int - - def __init__(self, message: str, *, response: httpx.Response) -> None: - super().__init__(message) - self.response = response - self.status_code = response.status_code - - -class APIRequestFailedError(APIStatusError): ... - - -class APIAuthenticationError(APIStatusError): ... - - -class APIReachLimitError(APIStatusError): ... - - -class APIInternalError(APIStatusError): ... - - -class APIServerFlowExceedError(APIStatusError): ... - - -class APIResponseError(ZhipuAIError): - message: str - request: httpx.Request - json_data: object - - def __init__(self, message: str, request: httpx.Request, json_data: object): - self.message = message - self.request = request - self.json_data = json_data - super().__init__(message) - - -class APIResponseValidationError(APIResponseError): - status_code: int - response: httpx.Response - - def __init__(self, response: httpx.Response, json_data: object | None, *, message: str | None = None) -> None: - super().__init__( - message=message or "Data returned by API invalid for expected schema.", - request=response.request, - json_data=json_data, - ) - self.response = response - self.status_code = response.status_code - - -class APIConnectionError(APIResponseError): - def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None: - super().__init__(message, request, json_data=None) - - -class APITimeoutError(APIConnectionError): - def __init__(self, request: httpx.Request) -> None: - super().__init__(message="Request timed out.", request=request) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_files.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_files.py deleted file mode 100644 index f9d2e14d9e..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_files.py +++ /dev/null @@ -1,75 +0,0 @@ -from __future__ import annotations - -import io -import os -import pathlib -from typing import TypeGuard, overload - -from ._base_type import ( - Base64FileInput, - FileContent, - FileTypes, - HttpxFileContent, - HttpxFileTypes, - HttpxRequestFiles, - RequestFiles, -) -from ._utils import is_mapping_t, is_sequence_t, is_tuple_t - - -def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]: - return isinstance(obj, io.IOBase | os.PathLike) - - -def is_file_content(obj: object) -> TypeGuard[FileContent]: - return isinstance(obj, bytes | tuple | io.IOBase | os.PathLike) - - -def assert_is_file_content(obj: object, *, key: str | None = None) -> None: - if not is_file_content(obj): - prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" - raise RuntimeError( - f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead. See https://github.com/openai/openai-python/tree/main#file-uploads" - ) from None - - -@overload -def to_httpx_files(files: None) -> None: ... - - -@overload -def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... 
- - -def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: - if files is None: - return None - - if is_mapping_t(files): - files = {key: _transform_file(file) for key, file in files.items()} - elif is_sequence_t(files): - files = [(key, _transform_file(file)) for key, file in files] - else: - raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") - - return files - - -def _transform_file(file: FileTypes) -> HttpxFileTypes: - if is_file_content(file): - if isinstance(file, os.PathLike): - path = pathlib.Path(file) - return (path.name, path.read_bytes()) - - return file - - if is_tuple_t(file): - return (file[0], _read_file_content(file[1]), *file[2:]) - - raise TypeError("Expected file types input to be a FileContent type or to be a tuple") - - -def _read_file_content(file: FileContent) -> HttpxFileContent: - if isinstance(file, os.PathLike): - return pathlib.Path(file).read_bytes() - return file diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py deleted file mode 100644 index ffdafb85d5..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py +++ /dev/null @@ -1,910 +0,0 @@ -from __future__ import annotations - -import inspect -import logging -import time -import warnings -from collections.abc import Iterator, Mapping -from itertools import starmap -from random import random -from typing import TYPE_CHECKING, Any, Generic, Literal, Optional, TypeVar, Union, cast, overload - -import httpx -import pydantic -from httpx import URL, Timeout - -from . import _errors, get_origin -from ._base_compat import model_copy -from ._base_models import GenericModel, construct_type, validate_type -from ._base_type import ( - NOT_GIVEN, - AnyMapping, - Body, - Data, - Headers, - HttpxSendArgs, - ModelBuilderProtocol, - NotGiven, - Omit, - PostParser, - Query, - RequestFiles, - ResponseT, -) -from ._constants import ( - INITIAL_RETRY_DELAY, - MAX_RETRY_DELAY, - RAW_RESPONSE_HEADER, - ZHIPUAI_DEFAULT_LIMITS, - ZHIPUAI_DEFAULT_MAX_RETRIES, - ZHIPUAI_DEFAULT_TIMEOUT, -) -from ._errors import APIConnectionError, APIResponseValidationError, APIStatusError, APITimeoutError -from ._files import to_httpx_files -from ._legacy_response import LegacyAPIResponse -from ._request_opt import FinalRequestOptions, UserRequestInput -from ._response import APIResponse, BaseAPIResponse, extract_response_type -from ._sse_client import StreamResponse -from ._utils import flatten, is_given, is_mapping - -log: logging.Logger = logging.getLogger(__name__) - -# TODO: make base page type vars covariant -SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]") -# AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]") - -_T = TypeVar("_T") -_T_co = TypeVar("_T_co", covariant=True) - -if TYPE_CHECKING: - from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT -else: - try: - from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT - except ImportError: - # taken from https://github.com/encode/httpx/blob/3ba5fe0d7ac70222590e759c31442b1cab263791/httpx/_config.py#L366 - HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) - - -headers = { - "Accept": "application/json", - "Content-Type": "application/json; charset=UTF-8", -} - - -class PageInfo: - """Stores the necessary information to build the request to retrieve the next page. - - Either `url` or `params` must be set. 
- """ - - url: URL | NotGiven - params: Query | NotGiven - - @overload - def __init__( - self, - *, - url: URL, - ) -> None: ... - - @overload - def __init__( - self, - *, - params: Query, - ) -> None: ... - - def __init__( - self, - *, - url: URL | NotGiven = NOT_GIVEN, - params: Query | NotGiven = NOT_GIVEN, - ) -> None: - self.url = url - self.params = params - - -class BasePage(GenericModel, Generic[_T]): - """ - Defines the core interface for pagination. - - Type Args: - ModelT: The pydantic model that represents an item in the response. - - Methods: - has_next_page(): Check if there is another page available - next_page_info(): Get the necessary information to make a request for the next page - """ - - _options: FinalRequestOptions = pydantic.PrivateAttr() - _model: type[_T] = pydantic.PrivateAttr() - - def has_next_page(self) -> bool: - items = self._get_page_items() - if not items: - return False - return self.next_page_info() is not None - - def next_page_info(self) -> Optional[PageInfo]: ... - - def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] - ... - - def _params_from_url(self, url: URL) -> httpx.QueryParams: - # TODO: do we have to preprocess params here? - return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params) - - def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: - options = model_copy(self._options) - options._strip_raw_response_header() - - if not isinstance(info.params, NotGiven): - options.params = {**options.params, **info.params} - return options - - if not isinstance(info.url, NotGiven): - params = self._params_from_url(info.url) - url = info.url.copy_with(params=params) - options.params = dict(url.params) - options.url = str(url) - return options - - raise ValueError("Unexpected PageInfo state") - - -class BaseSyncPage(BasePage[_T], Generic[_T]): - _client: HttpClient = pydantic.PrivateAttr() - - def _set_private_attributes( - self, - client: HttpClient, - model: type[_T], - options: FinalRequestOptions, - ) -> None: - self._model = model - self._client = client - self._options = options - - # Pydantic uses a custom `__iter__` method to support casting BaseModels - # to dictionaries. e.g. dict(model). - # As we want to support `for item in page`, this is inherently incompatible - # with the default pydantic behavior. It is not possible to support both - # use cases at once. Fortunately, this is not a big deal as all other pydantic - # methods should continue to work as expected as there is an alternative method - # to cast a model to a dictionary, model.dict(), which is used internally - # by pydantic. - def __iter__(self) -> Iterator[_T]: # type: ignore - for page in self.iter_pages(): - yield from page._get_page_items() - - def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]: - page = self - while True: - yield page - if page.has_next_page(): - page = page.get_next_page() - else: - return - - def get_next_page(self: SyncPageT) -> SyncPageT: - info = self.next_page_info() - if not info: - raise RuntimeError( - "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
- ) - - options = self._info_to_options(info) - return self._client._request_api_list(self._model, page=self.__class__, options=options) - - -class HttpClient: - _client: httpx.Client - _version: str - _base_url: URL - max_retries: int - timeout: Union[float, Timeout, None] - _limits: httpx.Limits - _has_custom_http_client: bool - _default_stream_cls: type[StreamResponse[Any]] | None = None - - _strict_response_validation: bool - - def __init__( - self, - *, - version: str, - base_url: URL, - _strict_response_validation: bool, - max_retries: int = ZHIPUAI_DEFAULT_MAX_RETRIES, - timeout: Union[float, Timeout, None], - limits: httpx.Limits | None = None, - custom_httpx_client: httpx.Client | None = None, - custom_headers: Mapping[str, str] | None = None, - ) -> None: - if limits is not None: - warnings.warn( - "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", # noqa: E501 - category=DeprecationWarning, - stacklevel=3, - ) - if custom_httpx_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`") - else: - limits = ZHIPUAI_DEFAULT_LIMITS - - if not is_given(timeout): - if custom_httpx_client and custom_httpx_client.timeout != HTTPX_DEFAULT_TIMEOUT: - timeout = custom_httpx_client.timeout - else: - timeout = ZHIPUAI_DEFAULT_TIMEOUT - self.max_retries = max_retries - self.timeout = timeout - self._limits = limits - self._has_custom_http_client = bool(custom_httpx_client) - self._client = custom_httpx_client or httpx.Client( - base_url=base_url, - timeout=self.timeout, - limits=limits, - ) - self._version = version - url = URL(url=base_url) - if not url.raw_path.endswith(b"/"): - url = url.copy_with(raw_path=url.raw_path + b"/") - self._base_url = url - self._custom_headers = custom_headers or {} - self._strict_response_validation = _strict_response_validation - - def _prepare_url(self, url: str) -> URL: - sub_url = URL(url) - if sub_url.is_relative_url: - request_raw_url = self._base_url.raw_path + sub_url.raw_path.lstrip(b"/") - return self._base_url.copy_with(raw_path=request_raw_url) - - return sub_url - - @property - def _default_headers(self): - return { - "Accept": "application/json", - "Content-Type": "application/json; charset=UTF-8", - "ZhipuAI-SDK-Ver": self._version, - "source_type": "zhipu-sdk-python", - "x-request-sdk": "zhipu-sdk-python", - **self.auth_headers, - **self._custom_headers, - } - - @property - def custom_auth(self) -> httpx.Auth | None: - return None - - @property - def auth_headers(self): - return {} - - def _prepare_headers(self, options: FinalRequestOptions) -> httpx.Headers: - custom_headers = options.headers or {} - headers_dict = _merge_mappings(self._default_headers, custom_headers) - - httpx_headers = httpx.Headers(headers_dict) - - return httpx_headers - - def _remaining_retries( - self, - remaining_retries: Optional[int], - options: FinalRequestOptions, - ) -> int: - return remaining_retries if remaining_retries is not None else options.get_max_retries(self.max_retries) - - def _calculate_retry_timeout( - self, - remaining_retries: int, - options: FinalRequestOptions, - response_headers: Optional[httpx.Headers] = None, - ) -> float: - max_retries = options.get_max_retries(self.max_retries) - - # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. 
-        # retry_after = self._parse_retry_after_header(response_headers)
-        # if retry_after is not None and 0 < retry_after <= 60:
-        #     return retry_after
-
-        nb_retries = max_retries - remaining_retries
-
-        # Apply exponential backoff, but not more than the max.
-        sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY)
-
-        # Apply some jitter: scale the delay by a random factor in (0.75, 1.0].
-        jitter = 1 - 0.25 * random()
-        timeout = sleep_seconds * jitter
-        return max(timeout, 0)
-
-    def _build_request(self, options: FinalRequestOptions) -> httpx.Request:
-        kwargs: dict[str, Any] = {}
-        headers = self._prepare_headers(options)
-        url = self._prepare_url(options.url)
-        json_data = options.json_data
-        if options.extra_json is not None:
-            if json_data is None:
-                json_data = cast(Body, options.extra_json)
-            elif is_mapping(json_data):
-                json_data = _merge_mappings(json_data, options.extra_json)
-            else:
-                raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`")
-
-        content_type = headers.get("Content-Type")
-        # multipart/form-data; boundary=---abc--
-        if headers.get("Content-Type") == "multipart/form-data":
-            if "boundary" not in content_type:
-                # only remove the header if the boundary hasn't been explicitly set
-                # as the caller doesn't want httpx to come up with their own boundary
-                headers.pop("Content-Type")
-
-            if json_data:
-                kwargs["data"] = self._make_multipartform(json_data)
-
-        return self._client.build_request(
-            headers=headers,
-            timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout,
-            method=options.method,
-            url=url,
-            json=json_data,
-            files=options.files,
-            params=options.params,
-            **kwargs,
-        )
-
-    def _object_to_formdata(self, key: str, value: Data | Mapping[object, object]) -> list[tuple[str, str]]:
-        items = []
-
-        if isinstance(value, Mapping):
-            for k, v in value.items():
-                items.extend(self._object_to_formdata(f"{key}[{k}]", v))
-            return items
-        if isinstance(value, list | tuple):
-            for v in value:
-                items.extend(self._object_to_formdata(key + "[]", v))
-            return items
-
-        def _primitive_value_to_str(val) -> str:
-            # copied from httpx
-            if val is True:
-                return "true"
-            elif val is False:
-                return "false"
-            elif val is None:
-                return ""
-            return str(val)
-
-        str_data = _primitive_value_to_str(value)
-
-        if not str_data:
-            return []
-        return [(key, str_data)]
-
-    def _make_multipartform(self, data: Mapping[object, object]) -> dict[str, object]:
-        items = flatten(list(starmap(self._object_to_formdata, data.items())))
-
-        serialized: dict[str, object] = {}
-        for key, value in items:
-            if key in serialized:
-                raise ValueError(f"Duplicate key: {key};")
-            serialized[key] = value
-        return serialized
-
-    def _process_response_data(
-        self,
-        *,
-        data: object,
-        cast_type: type[ResponseT],
-        response: httpx.Response,
-    ) -> ResponseT:
-        if data is None:
-            return cast(ResponseT, None)
-
-        if cast_type is object:
-            return cast(ResponseT, data)
-
-        try:
-            if inspect.isclass(cast_type) and issubclass(cast_type, ModelBuilderProtocol):
-                return cast(ResponseT, cast_type.build(response=response, data=data))
-
-            if self._strict_response_validation:
-                return cast(ResponseT, validate_type(type_=cast_type, value=data))
-
-            return cast(ResponseT, construct_type(type_=cast_type, value=data))
-        except pydantic.ValidationError as err:
-            raise APIResponseValidationError(response=response, json_data=data) from err
-
-    def _should_stream_response_body(self, request: httpx.Request) -> bool:
-        return
request.headers.get(RAW_RESPONSE_HEADER) == "stream" # type: ignore[no-any-return] - - def _should_retry(self, response: httpx.Response) -> bool: - # Note: this is not a standard header - should_retry_header = response.headers.get("x-should-retry") - - # If the server explicitly says whether or not to retry, obey. - if should_retry_header == "true": - log.debug("Retrying as header `x-should-retry` is set to `true`") - return True - if should_retry_header == "false": - log.debug("Not retrying as header `x-should-retry` is set to `false`") - return False - - # Retry on request timeouts. - if response.status_code == 408: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry on lock timeouts. - if response.status_code == 409: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry on rate limits. - if response.status_code == 429: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry internal errors. - if response.status_code >= 500: - log.debug("Retrying due to status code %i", response.status_code) - return True - - log.debug("Not retrying") - return False - - def is_closed(self) -> bool: - return self._client.is_closed - - def close(self): - self._client.close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - - def request( - self, - cast_type: type[ResponseT], - options: FinalRequestOptions, - remaining_retries: Optional[int] = None, - *, - stream: bool = False, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT | StreamResponse: - return self._request( - cast_type=cast_type, - options=options, - stream=stream, - stream_cls=stream_cls, - remaining_retries=remaining_retries, - ) - - def _request( - self, - *, - cast_type: type[ResponseT], - options: FinalRequestOptions, - remaining_retries: int | None, - stream: bool, - stream_cls: type[StreamResponse] | None, - ) -> ResponseT | StreamResponse: - retries = self._remaining_retries(remaining_retries, options) - request = self._build_request(options) - - kwargs: HttpxSendArgs = {} - if self.custom_auth is not None: - kwargs["auth"] = self.custom_auth - try: - response = self._client.send( - request, - stream=stream or self._should_stream_response_body(request=request), - **kwargs, - ) - except httpx.TimeoutException as err: - log.debug("Encountered httpx.TimeoutException", exc_info=True) - - if retries > 0: - return self._retry_request( - options, - cast_type, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising timeout error") - raise APITimeoutError(request=request) from err - except Exception as err: - log.debug("Encountered Exception", exc_info=True) - - if retries > 0: - return self._retry_request( - options, - cast_type, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising connection error") - raise APIConnectionError(request=request) from err - - log.debug( - 'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase - ) - - try: - response.raise_for_status() - except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code - log.debug("Encountered httpx.HTTPStatusError", exc_info=True) - - if retries > 0 and self._should_retry(err.response): - err.response.close() - return self._retry_request( - options, - cast_type, - retries, - err.response.headers, - stream=stream, - 
stream_cls=stream_cls, - ) - - # If the response is streamed then we need to explicitly read the response - # to completion before attempting to access the response text. - if not err.response.is_closed: - err.response.read() - - log.debug("Re-raising status error") - raise self._make_status_error(err.response) from None - - # return self._parse_response( - # cast_type=cast_type, - # options=options, - # response=response, - # stream=stream, - # stream_cls=stream_cls, - # ) - return self._process_response( - cast_type=cast_type, - options=options, - response=response, - stream=stream, - stream_cls=stream_cls, - ) - - def _retry_request( - self, - options: FinalRequestOptions, - cast_type: type[ResponseT], - remaining_retries: int, - response_headers: httpx.Headers | None, - *, - stream: bool, - stream_cls: type[StreamResponse] | None, - ) -> ResponseT | StreamResponse: - remaining = remaining_retries - 1 - if remaining == 1: - log.debug("1 retry left") - else: - log.debug("%i retries left", remaining) - - timeout = self._calculate_retry_timeout(remaining, options, response_headers) - log.info("Retrying request to %s in %f seconds", options.url, timeout) - - # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a - # different thread if necessary. - time.sleep(timeout) - - return self._request( - options=options, - cast_type=cast_type, - remaining_retries=remaining, - stream=stream, - stream_cls=stream_cls, - ) - - def _process_response( - self, - *, - cast_type: type[ResponseT], - options: FinalRequestOptions, - response: httpx.Response, - stream: bool, - stream_cls: type[StreamResponse] | None, - ) -> ResponseT: - # _legacy_response with raw_response_header to parser method - if response.request.headers.get(RAW_RESPONSE_HEADER) == "true": - return cast( - ResponseT, - LegacyAPIResponse( - raw=response, - client=self, - cast_type=cast_type, - stream=stream, - stream_cls=stream_cls, - options=options, - ), - ) - - origin = get_origin(cast_type) or cast_type - - if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): - if not issubclass(origin, APIResponse): - raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") - - response_cls = cast("type[BaseAPIResponse[Any]]", cast_type) - return cast( - ResponseT, - response_cls( - raw=response, - client=self, - cast_type=extract_response_type(response_cls), - stream=stream, - stream_cls=stream_cls, - options=options, - ), - ) - - if cast_type == httpx.Response: - return cast(ResponseT, response) - - api_response = APIResponse( - raw=response, - client=self, - cast_type=cast("type[ResponseT]", cast_type), # pyright: ignore[reportUnnecessaryCast] - stream=stream, - stream_cls=stream_cls, - options=options, - ) - if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): - return cast(ResponseT, api_response) - - return api_response.parse() - - def _request_api_list( - self, - model: type[object], - page: type[SyncPageT], - options: FinalRequestOptions, - ) -> SyncPageT: - def _parser(resp: SyncPageT) -> SyncPageT: - resp._set_private_attributes( - client=self, - model=model, - options=options, - ) - return resp - - options.post_parser = _parser - - return self.request(page, options, stream=False) - - @overload - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: Literal[False] = False, - ) -> ResponseT: ... 
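The `get`/`post` overload stubs in this hunk encode a typing rule: `stream=True` switches the declared return type to `StreamResponse`. A self-contained sketch of that pattern, with simplified stand-in types:

```python
from typing import Literal, Union, overload


class StreamResponse:  # stand-in for the SDK's streaming wrapper
    pass


@overload
def get(path: str, *, stream: Literal[False] = False) -> dict: ...
@overload
def get(path: str, *, stream: Literal[True]) -> StreamResponse: ...


def get(path: str, *, stream: bool = False) -> Union[dict, StreamResponse]:
    # A real client would issue the HTTP request here; this only shows the split.
    return StreamResponse() if stream else {"path": path}


data = get("/models")               # type checkers infer: dict
events = get("/chat", stream=True)  # type checkers infer: StreamResponse
print(type(data).__name__, type(events).__name__)
```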
- - @overload - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: Literal[True], - stream_cls: type[StreamResponse], - ) -> StreamResponse: ... - - @overload - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: bool, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT | StreamResponse: ... - - def get( - self, - path: str, - *, - cast_type: type[ResponseT], - options: UserRequestInput = {}, - stream: bool = False, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="get", url=path, **options) - return cast(ResponseT, self.request(cast_type, opts, stream=stream, stream_cls=stream_cls)) - - @overload - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: Literal[True], - stream_cls: type[StreamResponse], - ) -> StreamResponse: ... - - @overload - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: bool, - stream_cls: type[StreamResponse] | None = None, - ) -> ResponseT | StreamResponse: ... - - def post( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - stream: bool = False, - stream_cls: type[StreamResponse[Any]] | None = None, - ) -> ResponseT | StreamResponse: - opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=to_httpx_files(files), **options - ) - - return cast(ResponseT, self.request(cast_type, opts, stream=stream, stream_cls=stream_cls)) - - def patch( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) - - return self.request( - cast_type=cast_type, - options=opts, - ) - - def put( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - files: RequestFiles | None = None, - ) -> ResponseT | StreamResponse: - opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=to_httpx_files(files), **options - ) - - return self.request( - cast_type=cast_type, - options=opts, - ) - - def delete( - self, - path: str, - *, - cast_type: type[ResponseT], - body: Body | None = None, - options: UserRequestInput = {}, - ) -> ResponseT | StreamResponse: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) - - return self.request( - cast_type=cast_type, - options=opts, - ) - - def get_api_list( - self, - path: str, - *, - model: type[object], - page: type[SyncPageT], - body: Body | None = None, - options: UserRequestInput = {}, - method: str = "get", - ) -> SyncPageT: - opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) - return self._request_api_list(model, page, opts) - - def _make_status_error(self, response) -> APIStatusError: - response_text = 
response.text.strip() - status_code = response.status_code - error_msg = f"Error code: {status_code}, with error text {response_text}" - - if status_code == 400: - return _errors.APIRequestFailedError(message=error_msg, response=response) - elif status_code == 401: - return _errors.APIAuthenticationError(message=error_msg, response=response) - elif status_code == 429: - return _errors.APIReachLimitError(message=error_msg, response=response) - elif status_code == 500: - return _errors.APIInternalError(message=error_msg, response=response) - elif status_code == 503: - return _errors.APIServerFlowExceedError(message=error_msg, response=response) - return APIStatusError(message=error_msg, response=response) - - -def make_request_options( - *, - query: Query | None = None, - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - post_parser: PostParser | NotGiven = NOT_GIVEN, -) -> UserRequestInput: - """Create a dict of type RequestOptions without keys of NotGiven values.""" - options: UserRequestInput = {} - if extra_headers is not None: - options["headers"] = extra_headers - - if extra_body is not None: - options["extra_json"] = cast(AnyMapping, extra_body) - - if query is not None: - options["params"] = query - - if extra_query is not None: - options["params"] = {**options.get("params", {}), **extra_query} - - if not isinstance(timeout, NotGiven): - options["timeout"] = timeout - - if is_given(post_parser): - # internal - options["post_parser"] = post_parser # type: ignore - - return options - - -def _merge_mappings( - obj1: Mapping[_T_co, Union[_T, Omit]], - obj2: Mapping[_T_co, Union[_T, Omit]], -) -> dict[_T_co, _T]: - """Merge two mappings of the same type, removing any values that are instances of `Omit`. - - In cases with duplicate keys the second mapping takes precedence. 
- """ - merged = {**obj1, **obj2} - return {key: value for key, value in merged.items() if not isinstance(value, Omit)} diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_jwt_token.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_jwt_token.py deleted file mode 100644 index 21f158a5f4..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_jwt_token.py +++ /dev/null @@ -1,31 +0,0 @@ -import time - -import cachetools.func -import jwt - -# 缓存时间 3分钟 -CACHE_TTL_SECONDS = 3 * 60 - -# token 有效期比缓存时间 多30秒 -API_TOKEN_TTL_SECONDS = CACHE_TTL_SECONDS + 30 - - -@cachetools.func.ttl_cache(maxsize=10, ttl=CACHE_TTL_SECONDS) -def generate_token(apikey: str): - try: - api_key, secret = apikey.split(".") - except Exception as e: - raise Exception("invalid api_key", e) - - payload = { - "api_key": api_key, - "exp": int(round(time.time() * 1000)) + API_TOKEN_TTL_SECONDS * 1000, - "timestamp": int(round(time.time() * 1000)), - } - ret = jwt.encode( - payload, - secret, - algorithm="HS256", - headers={"alg": "HS256", "sign_type": "SIGN"}, - ) - return ret diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_binary_response.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_binary_response.py deleted file mode 100644 index 51623bd860..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_binary_response.py +++ /dev/null @@ -1,207 +0,0 @@ -from __future__ import annotations - -import os -from collections.abc import AsyncIterator, Iterator -from typing import Any - -import httpx - - -class HttpxResponseContent: - @property - def content(self) -> bytes: - raise NotImplementedError("This method is not implemented for this class.") - - @property - def text(self) -> str: - raise NotImplementedError("This method is not implemented for this class.") - - @property - def encoding(self) -> str | None: - raise NotImplementedError("This method is not implemented for this class.") - - @property - def charset_encoding(self) -> str | None: - raise NotImplementedError("This method is not implemented for this class.") - - def json(self, **kwargs: Any) -> Any: - raise NotImplementedError("This method is not implemented for this class.") - - def read(self) -> bytes: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_lines(self) -> Iterator[str]: - raise NotImplementedError("This method is not implemented for this class.") - - def iter_raw(self, chunk_size: int | None = None) -> Iterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - def write_to_file( - self, - file: str | os.PathLike[str], - ) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - def stream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - def close(self) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - async def aread(self) -> bytes: - raise NotImplementedError("This method is not implemented for this class.") 
- - async def aiter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - async def aiter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: - raise NotImplementedError("This method is not implemented for this class.") - - async def aiter_lines(self) -> AsyncIterator[str]: - raise NotImplementedError("This method is not implemented for this class.") - - async def aiter_raw(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - raise NotImplementedError("This method is not implemented for this class.") - - async def astream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - async def aclose(self) -> None: - raise NotImplementedError("This method is not implemented for this class.") - - -class HttpxBinaryResponseContent(HttpxResponseContent): - response: httpx.Response - - def __init__(self, response: httpx.Response) -> None: - self.response = response - - @property - def content(self) -> bytes: - return self.response.content - - @property - def encoding(self) -> str | None: - return self.response.encoding - - @property - def charset_encoding(self) -> str | None: - return self.response.charset_encoding - - def read(self) -> bytes: - return self.response.read() - - def text(self) -> str: - raise NotImplementedError("Not implemented for binary response content") - - def json(self, **kwargs: Any) -> Any: - raise NotImplementedError("Not implemented for binary response content") - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - def iter_lines(self) -> Iterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - async def aiter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - async def aiter_lines(self) -> AsyncIterator[str]: - raise NotImplementedError("Not implemented for binary response content") - - def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: - return self.response.iter_bytes(chunk_size) - - def iter_raw(self, chunk_size: int | None = None) -> Iterator[bytes]: - return self.response.iter_raw(chunk_size) - - def write_to_file( - self, - file: str | os.PathLike[str], - ) -> None: - """Write the output to the given file. - - Accepts a filename or any path-like object, e.g. pathlib.Path - - Note: if you want to stream the data to the file instead of writing - all at once then you should use `.with_streaming_response` when making - the API request, e.g. 
`client.with_streaming_response.foo().stream_to_file('my_filename.txt')` - """ - with open(file, mode="wb") as f: - for data in self.response.iter_bytes(): - f.write(data) - - def stream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - with open(file, mode="wb") as f: - for data in self.response.iter_bytes(chunk_size): - f.write(data) - - def close(self) -> None: - return self.response.close() - - async def aread(self) -> bytes: - return await self.response.aread() - - async def aiter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - return self.response.aiter_bytes(chunk_size) - - async def aiter_raw(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - return self.response.aiter_raw(chunk_size) - - async def astream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - path = anyio.Path(file) - async with await path.open(mode="wb") as f: - async for data in self.response.aiter_bytes(chunk_size): - await f.write(data) - - async def aclose(self) -> None: - return await self.response.aclose() - - -class HttpxTextBinaryResponseContent(HttpxBinaryResponseContent): - response: httpx.Response - - @property - def text(self) -> str: - return self.response.text - - def json(self, **kwargs: Any) -> Any: - return self.response.json(**kwargs) - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - return self.response.iter_text(chunk_size) - - def iter_lines(self) -> Iterator[str]: - return self.response.iter_lines() - - async def aiter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: - return self.response.aiter_text(chunk_size) - - async def aiter_lines(self) -> AsyncIterator[str]: - return self.response.aiter_lines() diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_response.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_response.py deleted file mode 100644 index 51bf21bcdc..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_legacy_response.py +++ /dev/null @@ -1,341 +0,0 @@ -from __future__ import annotations - -import datetime -import functools -import inspect -import logging -from collections.abc import Callable -from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union, cast, get_origin, overload - -import httpx -import pydantic -from typing_extensions import ParamSpec, override - -from ._base_models import BaseModel, is_basemodel -from ._base_type import NoneType -from ._constants import RAW_RESPONSE_HEADER -from ._errors import APIResponseValidationError -from ._legacy_binary_response import HttpxResponseContent, HttpxTextBinaryResponseContent -from ._sse_client import StreamResponse, extract_stream_chunk_type, is_stream_class_type -from ._utils import extract_type_arg, is_annotated_type, is_given - -if TYPE_CHECKING: - from ._http_client import HttpClient - from ._request_opt import FinalRequestOptions - -P = ParamSpec("P") -R = TypeVar("R") -_T = TypeVar("_T") - -log: logging.Logger = logging.getLogger(__name__) - - -class LegacyAPIResponse(Generic[R]): - """This is a legacy class as it will be replaced by `APIResponse` - and `AsyncAPIResponse` in the `_response.py` file in the next major - release. - - For the sync client this will mostly be the same with the exception - of `content` & `text` will be methods instead of properties. In the - async client, all methods will be async. 
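Everything this wrapper does ultimately delegates to httpx's streaming primitives; note, incidentally, that `astream_to_file` above references `anyio.Path` even though the deleted module never imports `anyio`, so that async path would have raised a `NameError` if exercised. For comparison, the same download-to-disk behaviour with plain httpx (a hedged sketch; `download_to` is an illustrative name):

```python
from pathlib import Path

import httpx


def download_to(url: str, file: Path, chunk_size: int = 8192) -> None:
    """Stream a response body straight to disk instead of buffering it all in memory."""
    with httpx.Client() as client, client.stream("GET", url) as response:
        response.raise_for_status()
        with file.open("wb") as f:
            for chunk in response.iter_bytes(chunk_size):
                f.write(chunk)
```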
- - A migration script will be provided & the migration in general should - be smooth. - """ - - _cast_type: type[R] - _client: HttpClient - _parsed_by_type: dict[type[Any], Any] - _stream: bool - _stream_cls: type[StreamResponse[Any]] | None - _options: FinalRequestOptions - - http_response: httpx.Response - - def __init__( - self, - *, - raw: httpx.Response, - cast_type: type[R], - client: HttpClient, - stream: bool, - stream_cls: type[StreamResponse[Any]] | None, - options: FinalRequestOptions, - ) -> None: - self._cast_type = cast_type - self._client = client - self._parsed_by_type = {} - self._stream = stream - self._stream_cls = stream_cls - self._options = options - self.http_response = raw - - @property - def request_id(self) -> str | None: - return self.http_response.headers.get("x-request-id") # type: ignore[no-any-return] - - @overload - def parse(self, *, to: type[_T]) -> _T: ... - - @overload - def parse(self) -> R: ... - - def parse(self, *, to: type[_T] | None = None) -> R | _T: - """Returns the rich python representation of this response's data. - - NOTE: For the async client: this will become a coroutine in the next major version. - - For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. - - You can customize the type that the response is parsed into through - the `to` argument, e.g. - - ```py - from zhipuai import BaseModel - - - class MyModel(BaseModel): - foo: str - - - obj = response.parse(to=MyModel) - print(obj.foo) - ``` - - We support parsing: - - `BaseModel` - - `dict` - - `list` - - `Union` - - `str` - - `int` - - `float` - - `httpx.Response` - """ - cache_key = to if to is not None else self._cast_type - cached = self._parsed_by_type.get(cache_key) - if cached is not None: - return cached # type: ignore[no-any-return] - - parsed = self._parse(to=to) - if is_given(self._options.post_parser): - parsed = self._options.post_parser(parsed) - - self._parsed_by_type[cache_key] = parsed - return parsed - - @property - def headers(self) -> httpx.Headers: - return self.http_response.headers - - @property - def http_request(self) -> httpx.Request: - return self.http_response.request - - @property - def status_code(self) -> int: - return self.http_response.status_code - - @property - def url(self) -> httpx.URL: - return self.http_response.url - - @property - def method(self) -> str: - return self.http_request.method - - @property - def content(self) -> bytes: - """Return the binary response content. - - NOTE: this will be removed in favour of `.read()` in the - next major version. - """ - return self.http_response.content - - @property - def text(self) -> str: - """Return the decoded response content. - - NOTE: this will be turned into a method in the next major version. 
- """ - return self.http_response.text - - @property - def http_version(self) -> str: - return self.http_response.http_version - - @property - def is_closed(self) -> bool: - return self.http_response.is_closed - - @property - def elapsed(self) -> datetime.timedelta: - """The time taken for the complete request/response cycle to complete.""" - return self.http_response.elapsed - - def _parse(self, *, to: type[_T] | None = None) -> R | _T: - # unwrap `Annotated[T, ...]` -> `T` - if to and is_annotated_type(to): - to = extract_type_arg(to, 0) - - if self._stream: - if to: - if not is_stream_class_type(to): - raise TypeError(f"Expected custom parse type to be a subclass of {StreamResponse}") - - return cast( - _T, - to( - cast_type=extract_stream_chunk_type( - to, - failure_message="Expected custom stream type to be passed with a type argument, e.g. StreamResponse[ChunkType]", # noqa: E501 - ), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - if self._stream_cls: - return cast( - R, - self._stream_cls( - cast_type=extract_stream_chunk_type(self._stream_cls), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - stream_cls = cast("type[StreamResponse[Any]] | None", self._client._default_stream_cls) - if stream_cls is None: - raise MissingStreamClassError() - - return cast( - R, - stream_cls( - cast_type=self._cast_type, - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - cast_type = to if to is not None else self._cast_type - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(cast_type): - cast_type = extract_type_arg(cast_type, 0) - - if cast_type is NoneType: - return cast(R, None) - - response = self.http_response - if cast_type == str: - return cast(R, response.text) - - if cast_type == int: - return cast(R, int(response.text)) - - if cast_type == float: - return cast(R, float(response.text)) - - origin = get_origin(cast_type) or cast_type - - if inspect.isclass(origin) and issubclass(origin, HttpxResponseContent): - # in the response, e.g. mime file - *_, filename = response.headers.get("content-disposition", "").split("filename=") - # 判断文件类型是jsonl类型的使用HttpxTextBinaryResponseContent - if filename and filename.endswith(".jsonl") or filename and filename.endswith(".xlsx"): - return cast(R, HttpxTextBinaryResponseContent(response)) - else: - return cast(R, cast_type(response)) # type: ignore - - if origin == LegacyAPIResponse: - raise RuntimeError("Unexpected state - cast_type is `APIResponse`") - - if inspect.isclass(origin) and issubclass(origin, httpx.Response): - # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response - # and pass that class to our request functions. We cannot change the variance to be either - # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct - # the response class ourselves but that is something that should be supported directly in httpx - # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. - if cast_type != httpx.Response: - raise ValueError("Subclasses of httpx.Response cannot be passed to `cast_type`") - return cast(R, response) - - if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel): - raise TypeError("Pydantic models must subclass our base model type, e.g. 
`from openai import BaseModel`") - - if ( - cast_type is not object - and origin is not list - and origin is not dict - and origin is not Union - and not issubclass(origin, BaseModel) - ): - raise RuntimeError( - f"Unsupported type, expected {cast_type} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." # noqa: E501 - ) - - # split is required to handle cases where additional information is included - # in the response, e.g. application/json; charset=utf-8 - content_type, *_ = response.headers.get("content-type", "*").split(";") - if content_type != "application/json": - if is_basemodel(cast_type): - try: - data = response.json() - except Exception as exc: - log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) - else: - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - if self._client._strict_response_validation: - raise APIResponseValidationError( - response=response, - message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", # noqa: E501 - json_data=response.text, - ) - - # If the API responds with content that isn't JSON then we just return - # the (decoded) text without performing any parsing so that you can still - # handle the response however you need to. - return response.text # type: ignore - - data = response.json() - - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - @override - def __repr__(self) -> str: - return f"" - - -class MissingStreamClassError(TypeError): - def __init__(self) -> None: - super().__init__( - "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `openai._streaming` for reference", # noqa: E501 - ) - - -def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, LegacyAPIResponse[R]]: - """Higher order function that takes one of our bound API methods and wraps it - to support returning the raw `APIResponse` object directly. 
- """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> LegacyAPIResponse[R]: - extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "true" - - kwargs["extra_headers"] = extra_headers - - return cast(LegacyAPIResponse[R], func(*args, **kwargs)) - - return wrapped diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_request_opt.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_request_opt.py deleted file mode 100644 index c3b894b3a3..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_request_opt.py +++ /dev/null @@ -1,97 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable -from typing import TYPE_CHECKING, Any, ClassVar, Union, cast - -import pydantic.generics -from httpx import Timeout -from typing_extensions import Required, TypedDict, Unpack, final - -from ._base_compat import PYDANTIC_V2, ConfigDict -from ._base_type import AnyMapping, Body, Headers, HttpxRequestFiles, NotGiven, Query -from ._constants import RAW_RESPONSE_HEADER -from ._utils import is_given, strip_not_given - - -class UserRequestInput(TypedDict, total=False): - headers: Headers - max_retries: int - timeout: float | Timeout | None - params: Query - extra_json: AnyMapping - - -class FinalRequestOptionsInput(TypedDict, total=False): - method: Required[str] - url: Required[str] - params: Query - headers: Headers - max_retries: int - timeout: float | Timeout | None - files: HttpxRequestFiles | None - json_data: Body - extra_json: AnyMapping - - -@final -class FinalRequestOptions(pydantic.BaseModel): - method: str - url: str - params: Query = {} - headers: Union[Headers, NotGiven] = NotGiven() - max_retries: Union[int, NotGiven] = NotGiven() - timeout: Union[float, Timeout, None, NotGiven] = NotGiven() - files: Union[HttpxRequestFiles, None] = None - idempotency_key: Union[str, None] = None - post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() - - # It should be noted that we cannot use `json` here as that would override - # a BaseModel method in an incompatible fashion. - json_data: Union[Body, None] = None - extra_json: Union[AnyMapping, None] = None - - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) - else: - - class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] - arbitrary_types_allowed: bool = True - - def get_max_retries(self, max_retries: int) -> int: - if isinstance(self.max_retries, NotGiven): - return max_retries - return self.max_retries - - def _strip_raw_response_header(self) -> None: - if not is_given(self.headers): - return - - if self.headers.get(RAW_RESPONSE_HEADER): - self.headers = {**self.headers} - self.headers.pop(RAW_RESPONSE_HEADER) - - # override the `construct` method so that we can run custom transformations. 
- # this is necessary as we don't want to do any actual runtime type checking - # (which means we can't use validators) but we do want to ensure that `NotGiven` - # values are not present - # - # type ignore required because we're adding explicit types to `**values` - @classmethod - def construct( # type: ignore - cls, - _fields_set: set[str] | None = None, - **values: Unpack[UserRequestInput], - ) -> FinalRequestOptions: - kwargs: dict[str, Any] = { - # we unconditionally call `strip_not_given` on any value - # as it will just ignore any non-mapping types - key: strip_not_given(value) - for key, value in values.items() - } - if PYDANTIC_V2: - return super().model_construct(_fields_set, **kwargs) - return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] - - if not TYPE_CHECKING: - # type checkers incorrectly complain about this assignment - model_construct = construct diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_response.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_response.py deleted file mode 100644 index 92e6018055..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_response.py +++ /dev/null @@ -1,398 +0,0 @@ -from __future__ import annotations - -import datetime -import inspect -import logging -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union, cast, get_origin, overload - -import httpx -import pydantic -from typing_extensions import ParamSpec, override - -from ._base_models import BaseModel, is_basemodel -from ._base_type import NoneType -from ._errors import APIResponseValidationError, ZhipuAIError -from ._sse_client import StreamResponse, extract_stream_chunk_type, is_stream_class_type -from ._utils import extract_type_arg, extract_type_var_from_base, is_annotated_type, is_given - -if TYPE_CHECKING: - from ._http_client import HttpClient - from ._request_opt import FinalRequestOptions - -P = ParamSpec("P") -R = TypeVar("R") -_T = TypeVar("_T") -_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]") -log: logging.Logger = logging.getLogger(__name__) - - -class BaseAPIResponse(Generic[R]): - _cast_type: type[R] - _client: HttpClient - _parsed_by_type: dict[type[Any], Any] - _is_sse_stream: bool - _stream_cls: type[StreamResponse[Any]] - _options: FinalRequestOptions - http_response: httpx.Response - - def __init__( - self, - *, - raw: httpx.Response, - cast_type: type[R], - client: HttpClient, - stream: bool, - stream_cls: type[StreamResponse[Any]] | None = None, - options: FinalRequestOptions, - ) -> None: - self._cast_type = cast_type - self._client = client - self._parsed_by_type = {} - self._is_sse_stream = stream - self._stream_cls = stream_cls - self._options = options - self.http_response = raw - - def _parse(self, *, to: type[_T] | None = None) -> R | _T: - # unwrap `Annotated[T, ...]` -> `T` - if to and is_annotated_type(to): - to = extract_type_arg(to, 0) - - if self._is_sse_stream: - if to: - if not is_stream_class_type(to): - raise TypeError(f"Expected custom parse type to be a subclass of {StreamResponse}") - - return cast( - _T, - to( - cast_type=extract_stream_chunk_type( - to, - failure_message="Expected custom stream type to be passed with a type argument, e.g. 
StreamResponse[ChunkType]", # noqa: E501 - ), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - if self._stream_cls: - return cast( - R, - self._stream_cls( - cast_type=extract_stream_chunk_type(self._stream_cls), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - stream_cls = cast("type[Stream[Any]] | None", self._client._default_stream_cls) - if stream_cls is None: - raise MissingStreamClassError() - - return cast( - R, - stream_cls( - cast_type=self._cast_type, - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - cast_type = to if to is not None else self._cast_type - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(cast_type): - cast_type = extract_type_arg(cast_type, 0) - - if cast_type is NoneType: - return cast(R, None) - - response = self.http_response - if cast_type == str: - return cast(R, response.text) - - if cast_type == bytes: - return cast(R, response.content) - - if cast_type == int: - return cast(R, int(response.text)) - - if cast_type == float: - return cast(R, float(response.text)) - - origin = get_origin(cast_type) or cast_type - - # handle the legacy binary response case - if inspect.isclass(cast_type) and cast_type.__name__ == "HttpxBinaryResponseContent": - return cast(R, cast_type(response)) # type: ignore - - if origin == APIResponse: - raise RuntimeError("Unexpected state - cast_type is `APIResponse`") - - if inspect.isclass(origin) and issubclass(origin, httpx.Response): - # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response - # and pass that class to our request functions. We cannot change the variance to be either - # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct - # the response class ourselves but that is something that should be supported directly in httpx - # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. - if cast_type != httpx.Response: - raise ValueError("Subclasses of httpx.Response cannot be passed to `cast_type`") - return cast(R, response) - - if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel): - raise TypeError("Pydantic models must subclass our base model type, e.g. `from openai import BaseModel`") - - if ( - cast_type is not object - and origin is not list - and origin is not dict - and origin is not Union - and not issubclass(origin, BaseModel) - ): - raise RuntimeError( - f"Unsupported type, expected {cast_type} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." # noqa: E501 - ) - - # split is required to handle cases where additional information is included - # in the response, e.g. 
application/json; charset=utf-8 - content_type, *_ = response.headers.get("content-type", "*").split(";") - if content_type != "application/json": - if is_basemodel(cast_type): - try: - data = response.json() - except Exception as exc: - log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) - else: - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - if self._client._strict_response_validation: - raise APIResponseValidationError( - response=response, - message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", # noqa: E501 - json_data=response.text, - ) - - # If the API responds with content that isn't JSON then we just return - # the (decoded) text without performing any parsing so that you can still - # handle the response however you need to. - return response.text # type: ignore - - data = response.json() - - return self._client._process_response_data( - data=data, - cast_type=cast_type, # type: ignore - response=response, - ) - - @property - def headers(self) -> httpx.Headers: - return self.http_response.headers - - @property - def http_request(self) -> httpx.Request: - """Returns the httpx Request instance associated with the current response.""" - return self.http_response.request - - @property - def status_code(self) -> int: - return self.http_response.status_code - - @property - def url(self) -> httpx.URL: - """Returns the URL for which the request was made.""" - return self.http_response.url - - @property - def method(self) -> str: - return self.http_request.method - - @property - def http_version(self) -> str: - return self.http_response.http_version - - @property - def elapsed(self) -> datetime.timedelta: - """The time taken for the complete request/response cycle to complete.""" - return self.http_response.elapsed - - @property - def is_closed(self) -> bool: - """Whether or not the response body has been closed. - - If this is False then there is response data that has not been read yet. - You must either fully consume the response body or call `.close()` - before discarding the response to prevent resource leaks. - """ - return self.http_response.is_closed - - @override - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_type}>" # noqa: E501 - - -class APIResponse(BaseAPIResponse[R]): - @property - def request_id(self) -> str | None: - return self.http_response.headers.get("x-request-id") # type: ignore[no-any-return] - - @overload - def parse(self, *, to: type[_T]) -> _T: ... - - @overload - def parse(self) -> R: ... - - def parse(self, *, to: type[_T] | None = None) -> R | _T: - """Returns the rich python representation of this response's data. - - For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. - - You can customize the type that the response is parsed into through - the `to` argument, e.g. 
-
-        ```py
-        from openai import BaseModel
-
-
-        class MyModel(BaseModel):
-            foo: str
-
-
-        obj = response.parse(to=MyModel)
-        print(obj.foo)
-        ```
-
-        We support parsing:
-          - `BaseModel`
-          - `dict`
-          - `list`
-          - `Union`
-          - `str`
-          - `int`
-          - `float`
-          - `httpx.Response`
-        """
-        cache_key = to if to is not None else self._cast_type
-        cached = self._parsed_by_type.get(cache_key)
-        if cached is not None:
-            return cached  # type: ignore[no-any-return]
-
-        if not self._is_sse_stream:
-            self.read()
-
-        parsed = self._parse(to=to)
-        if is_given(self._options.post_parser):
-            parsed = self._options.post_parser(parsed)
-
-        self._parsed_by_type[cache_key] = parsed
-        return parsed
-
-    def read(self) -> bytes:
-        """Read and return the binary response content."""
-        try:
-            return self.http_response.read()
-        except httpx.StreamConsumed as exc:
-            # The default error raised by httpx isn't very
-            # helpful in our case so we re-raise it with
-            # a different error message.
-            raise StreamAlreadyConsumed() from exc
-
-    def text(self) -> str:
-        """Read and decode the response content into a string."""
-        self.read()
-        return self.http_response.text
-
-    def json(self) -> object:
-        """Read and decode the JSON response content."""
-        self.read()
-        return self.http_response.json()
-
-    def close(self) -> None:
-        """Close the response and release the connection.
-
-        Automatically called if the response body is read to completion.
-        """
-        self.http_response.close()
-
-    def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]:
-        """
-        A byte-iterator over the decoded response content.
-
-        This automatically handles gzip, deflate and brotli encoded responses.
-        """
-        yield from self.http_response.iter_bytes(chunk_size)
-
-    def iter_text(self, chunk_size: int | None = None) -> Iterator[str]:
-        """A str-iterator over the decoded response content
-        that handles both gzip, deflate, etc but also detects the content's
-        string encoding.
-        """
-        yield from self.http_response.iter_text(chunk_size)
-
-    def iter_lines(self) -> Iterator[str]:
-        """Like `iter_text()` but will only yield chunks for each line"""
-        yield from self.http_response.iter_lines()
-
-
-class MissingStreamClassError(TypeError):
-    def __init__(self) -> None:
-        super().__init__(
-            "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `openai._streaming` for reference",  # noqa: E501
-        )
-
-
-class StreamAlreadyConsumed(ZhipuAIError):  # noqa: N818
-    """
-    Attempted to read or stream content, but the content has already
-    been streamed.
-
-    This can happen if you use a method like `.iter_lines()` and then attempt
-    to read the entire response body afterwards, e.g.
-
-    ```py
-    response = await client.post(...)
-    async for line in response.iter_lines():
-        ...  # do something with `line`
-
-    content = await response.read()
-    # ^ error
-    ```
-
-    If you want this behavior you'll need to either manually accumulate the response
-    content or call `await response.read()` before iterating over the stream.
-    """
-
-    def __init__(self) -> None:
-        message = (
-            "Attempted to read or stream some content, but the content has "
-            "already been streamed. "
-            "This could be due to attempting to stream the response "
-            "content more than once."
-            "\n\n"
-            "You can fix this by manually accumulating the response content while streaming "
-            "or by calling `.read()` before starting to stream."
-        )
-        super().__init__(message)
-
-
-def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type:
-    """Given a type like `APIResponse[T]`, returns the generic type variable `T`.
-
-    This also handles the case where a concrete subclass is given, e.g.
-    ```py
-    class MyResponse(APIResponse[bytes]):
-        ...
-
-    extract_response_type(MyResponse) -> bytes
-    ```
-    """
-    return extract_type_var_from_base(
-        typ,
-        generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse)),
-        index=0,
-    )
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_sse_client.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_sse_client.py
deleted file mode 100644
index cbc449d244..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_sse_client.py
+++ /dev/null
@@ -1,206 +0,0 @@
-from __future__ import annotations
-
-import inspect
-import json
-from collections.abc import Iterator
-from typing import TYPE_CHECKING, Generic, TypeGuard, cast
-
-import httpx
-
-from . import get_origin
-from ._base_type import ResponseT
-from ._errors import APIResponseError
-from ._utils import extract_type_var_from_base, is_mapping
-
-_FIELD_SEPARATOR = ":"
-
-if TYPE_CHECKING:
-    from ._http_client import HttpClient
-
-
-class StreamResponse(Generic[ResponseT]):
-    response: httpx.Response
-    _cast_type: type[ResponseT]
-
-    def __init__(
-        self,
-        *,
-        cast_type: type[ResponseT],
-        response: httpx.Response,
-        client: HttpClient,
-    ) -> None:
-        self.response = response
-        self._cast_type = cast_type
-        self._data_process_func = client._process_response_data
-        self._stream_chunks = self.__stream__()
-
-    def __next__(self) -> ResponseT:
-        return self._stream_chunks.__next__()
-
-    def __iter__(self) -> Iterator[ResponseT]:
-        yield from self._stream_chunks
-
-    def __stream__(self) -> Iterator[ResponseT]:
-        sse_line_parser = SSELineParser()
-        iterator = sse_line_parser.iter_lines(self.response.iter_lines())
-
-        for sse in iterator:
-            if sse.data.startswith("[DONE]"):
-                break
-
-            if sse.event is None:
-                data = sse.json_data()
-                if is_mapping(data) and data.get("error"):
-                    message = None
-                    error = data.get("error")
-                    if is_mapping(error):
-                        message = error.get("message")
-                    if not message or not isinstance(message, str):
-                        message = "An error occurred during streaming"
-
-                    raise APIResponseError(
-                        message=message,
-                        request=self.response.request,
-                        json_data=data["error"],
-                    )
-                yield self._data_process_func(data=data, cast_type=self._cast_type, response=self.response)
-
-            else:
-                data = sse.json_data()
-
-                if sse.event == "error" and is_mapping(data) and data.get("error"):
-                    message = None
-                    error = data.get("error")
-                    if is_mapping(error):
-                        message = error.get("message")
-                    if not message or not isinstance(message, str):
-                        message = "An error occurred during streaming"
-
-                    raise APIResponseError(
-                        message=message,
-                        request=self.response.request,
-                        json_data=data["error"],
-                    )
-                yield self._data_process_func(data=data, cast_type=self._cast_type, response=self.response)
-
-        # drain any events the parser may still hold after [DONE]
-        for sse in iterator:
-            pass
-
-
-class Event:
-    def __init__(
-        self, event: str | None = None, data: str | None = None, id: str | None = None, retry: int | None = None
-    ):
-        self._event = event
-        self._data = data
-        self._id = id
-        self._retry = retry
-
-    def __repr__(self):
-        data_len = len(self._data) if self._data else 0
-        return (
-            f"Event(event={self._event}, data={self._data}, data_length={data_len}, id={self._id}, retry={self._retry})"
-        )
-
-    @property
-    def event(self):
-        return self._event
-
-    @property
-    def data(self):
-        return self._data
-
-    def json_data(self):
-        return json.loads(self._data)
-
-    @property
-    def id(self):
-        return self._id
-
-    @property
-    def retry(self):
-        return self._retry
-
-
-class SSELineParser:
-    _data: list[str]
-    _event: str | None
-    _retry: int | None
-    _id: str | None
-
-    def __init__(self):
-        self._event = None
-        self._data = []
-        self._id = None
-        self._retry = None
-
-    def iter_lines(self, lines: Iterator[str]) -> Iterator[Event]:
-        for line in lines:
-            line = line.rstrip("\n")
-            if not line:
-                if self._event is None and not self._data and self._id is None and self._retry is None:
-                    continue
-                sse_event = Event(event=self._event, data="\n".join(self._data), id=self._id, retry=self._retry)
-                self._event = None
-                self._data = []
-                self._id = None
-                self._retry = None
-
-                yield sse_event
-            self.decode_line(line)
-
-    def decode_line(self, line: str):
-        if line.startswith(":") or not line:
-            return
-
-        field, _p, value = line.partition(":")
-
-        value = value.removeprefix(" ")
-        if field == "data":
-            self._data.append(value)
-        elif field == "event":
-            self._event = value
-        elif field == "retry":
-            try:
-                self._retry = int(value)
-            except (TypeError, ValueError):
-                pass
-        return
-
-
-def is_stream_class_type(typ: type) -> TypeGuard[type[StreamResponse[object]]]:
-    """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`"""
-    origin = get_origin(typ) or typ
-    return inspect.isclass(origin) and issubclass(origin, StreamResponse)
-
-
-def extract_stream_chunk_type(
-    stream_cls: type,
-    *,
-    failure_message: str | None = None,
-) -> type:
-    """Given a type like `StreamResponse[T]`, returns the generic type variable `T`.
-
-    This also handles the case where a concrete subclass is given, e.g.
-    ```py
-    class MyStream(StreamResponse[bytes]):
-        ...
- - extract_stream_chunk_type(MyStream) -> bytes - ``` - """ - - return extract_type_var_from_base( - stream_cls, - index=0, - generic_bases=cast("tuple[type, ...]", (StreamResponse,)), - failure_message=failure_message, - ) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/__init__.py deleted file mode 100644 index a66b095816..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -from ._utils import ( # noqa: I001 - remove_notgiven_indict as remove_notgiven_indict, # noqa: PLC0414 - flatten as flatten, # noqa: PLC0414 - is_dict as is_dict, # noqa: PLC0414 - is_list as is_list, # noqa: PLC0414 - is_given as is_given, # noqa: PLC0414 - is_tuple as is_tuple, # noqa: PLC0414 - is_mapping as is_mapping, # noqa: PLC0414 - is_tuple_t as is_tuple_t, # noqa: PLC0414 - parse_date as parse_date, # noqa: PLC0414 - is_iterable as is_iterable, # noqa: PLC0414 - is_sequence as is_sequence, # noqa: PLC0414 - coerce_float as coerce_float, # noqa: PLC0414 - is_mapping_t as is_mapping_t, # noqa: PLC0414 - removeprefix as removeprefix, # noqa: PLC0414 - removesuffix as removesuffix, # noqa: PLC0414 - extract_files as extract_files, # noqa: PLC0414 - is_sequence_t as is_sequence_t, # noqa: PLC0414 - required_args as required_args, # noqa: PLC0414 - coerce_boolean as coerce_boolean, # noqa: PLC0414 - coerce_integer as coerce_integer, # noqa: PLC0414 - file_from_path as file_from_path, # noqa: PLC0414 - parse_datetime as parse_datetime, # noqa: PLC0414 - strip_not_given as strip_not_given, # noqa: PLC0414 - deepcopy_minimal as deepcopy_minimal, # noqa: PLC0414 - get_async_library as get_async_library, # noqa: PLC0414 - maybe_coerce_float as maybe_coerce_float, # noqa: PLC0414 - get_required_header as get_required_header, # noqa: PLC0414 - maybe_coerce_boolean as maybe_coerce_boolean, # noqa: PLC0414 - maybe_coerce_integer as maybe_coerce_integer, # noqa: PLC0414 - drop_prefix_image_data as drop_prefix_image_data, # noqa: PLC0414 -) - - -from ._typing import ( - is_list_type as is_list_type, # noqa: PLC0414 - is_union_type as is_union_type, # noqa: PLC0414 - extract_type_arg as extract_type_arg, # noqa: PLC0414 - is_iterable_type as is_iterable_type, # noqa: PLC0414 - is_required_type as is_required_type, # noqa: PLC0414 - is_annotated_type as is_annotated_type, # noqa: PLC0414 - strip_annotated_type as strip_annotated_type, # noqa: PLC0414 - extract_type_var_from_base as extract_type_var_from_base, # noqa: PLC0414 -) - -from ._transform import ( - PropertyInfo as PropertyInfo, # noqa: PLC0414 - transform as transform, # noqa: PLC0414 - async_transform as async_transform, # noqa: PLC0414 - maybe_transform as maybe_transform, # noqa: PLC0414 - async_maybe_transform as async_maybe_transform, # noqa: PLC0414 -) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_transform.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_transform.py deleted file mode 100644 index e8ef1f7935..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_transform.py +++ /dev/null @@ -1,383 +0,0 @@ -from __future__ import annotations - -import base64 -import io -import pathlib -from collections.abc import Mapping -from datetime import date, datetime -from typing import Any, Literal, TypeVar, cast, get_args, get_type_hints - -import anyio -import pydantic 
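The `SSELineParser` deleted just above hand-rolls the server-sent-events wire format: `data:` lines accumulate, a blank line flushes the pending event, and `:`-prefixed lines are comments. The state machine fits in a few lines; here is a self-contained sketch (simplified from the deleted class, with illustrative names):

```python
import json
from collections.abc import Iterator


def iter_sse_events(lines: Iterator[str]) -> Iterator[dict]:
    """Group raw SSE lines into events; a blank line terminates each event."""
    event: str | None = None
    data: list[str] = []
    for line in lines:
        line = line.rstrip("\n")
        if not line:  # the blank separator line dispatches the pending event
            if data:
                yield {"event": event, "data": "\n".join(data)}
            event, data = None, []
        elif not line.startswith(":"):  # lines starting with ":" are comments
            field, _, value = line.partition(":")
            value = value.removeprefix(" ")
            if field == "data":
                data.append(value)
            elif field == "event":
                event = value


# e.g. streaming chat chunks: stop at the "[DONE]" sentinel, JSON-decode the rest
raw = iter(['data: {"delta": "Hi"}', "", "data: [DONE]", ""])
for evt in iter_sse_events(raw):
    if evt["data"].startswith("[DONE]"):
        break
    print(json.loads(evt["data"]))
```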
-from typing_extensions import override
-
-from .._base_compat import is_typeddict, model_dump
-from .._files import is_base64_file_input
-from ._typing import (
-    extract_type_arg,
-    is_annotated_type,
-    is_iterable_type,
-    is_list_type,
-    is_required_type,
-    is_union_type,
-    strip_annotated_type,
-)
-from ._utils import (
-    is_iterable,
-    is_list,
-    is_mapping,
-)
-
-_T = TypeVar("_T")
-
-
-# TODO: support for drilling globals() and locals()
-# TODO: ensure works correctly with forward references in all cases
-
-
-PropertyFormat = Literal["iso8601", "base64", "custom"]
-
-
-class PropertyInfo:
-    """Metadata class to be used in Annotated types to provide information about a given type.
-
-    For example:
-
-    class MyParams(TypedDict):
-        account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')]
-
-    This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
-    """  # noqa: E501
-
-    alias: str | None
-    format: PropertyFormat | None
-    format_template: str | None
-    discriminator: str | None
-
-    def __init__(
-        self,
-        *,
-        alias: str | None = None,
-        format: PropertyFormat | None = None,
-        format_template: str | None = None,
-        discriminator: str | None = None,
-    ) -> None:
-        self.alias = alias
-        self.format = format
-        self.format_template = format_template
-        self.discriminator = discriminator
-
-    @override
-    def __repr__(self) -> str:
-        return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')"  # noqa: E501
-
-
-def maybe_transform(
-    data: object,
-    expected_type: object,
-) -> Any | None:
-    """Wrapper over `transform()` that allows `None` to be passed.
-
-    See `transform()` for more details.
-    """
-    if data is None:
-        return None
-    return transform(data, expected_type)
-
-
-# Wrapper over _transform_recursive providing fake types
-def transform(
-    data: _T,
-    expected_type: object,
-) -> _T:
-    """Transform dictionaries based off of type information from the given type, for example:
-
-    ```py
-    class Params(TypedDict, total=False):
-        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]
-
-
-    transformed = transform({"card_id": "<my card ID>"}, Params)
-    # {'cardID': '<my card ID>'}
-    ```
-
-    Any keys / data that does not have type information given will be included as is.
-
-    It should be noted that the transformations that this function does are not represented in the type system.
-    """
-    transformed = _transform_recursive(data, annotation=cast(type, expected_type))
-    return cast(_T, transformed)
-
-
-def _get_annotated_type(type_: type) -> type | None:
-    """If the given type is an `Annotated` type then it is returned, if not `None` is returned.
-
-    This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
-    """
-    if is_required_type(type_):
-        # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]`
-        type_ = get_args(type_)[0]
-
-    if is_annotated_type(type_):
-        return type_
-
-    return None
-
-
-def _maybe_transform_key(key: str, type_: type) -> str:
-    """Transform the given `data` based on the annotations provided in `type_`.
-
-    Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata.
- """ - annotated_type = _get_annotated_type(type_) - if annotated_type is None: - # no `Annotated` definition for this type, no transformation needed - return key - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.alias is not None: - return annotation.alias - - return key - - -def _transform_recursive( - data: object, - *, - annotation: type, - inner_type: type | None = None, -) -> object: - """Transform the given data against the expected type. - - Args: - annotation: The direct type annotation given to the particular piece of data. - This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc - - inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type - is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in - the list can be transformed using the metadata from the container type. - - Defaults to the same value as the `annotation` argument. - """ - if inner_type is None: - inner_type = annotation - - stripped_type = strip_annotated_type(inner_type) - if is_typeddict(stripped_type) and is_mapping(data): - return _transform_typeddict(data, stripped_type) - - if ( - # List[T] - (is_list_type(stripped_type) and is_list(data)) - # Iterable[T] - or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) - ): - inner_type = extract_type_arg(stripped_type, 0) - return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] - - if is_union_type(stripped_type): - # For union types we run the transformation against all subtypes to ensure that everything is transformed. - # - # TODO: there may be edge cases where the same normalized field name will transform to two different names - # in different subtypes. 
- for subtype in get_args(stripped_type): - data = _transform_recursive(data, annotation=annotation, inner_type=subtype) - return data - - if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) - - annotated_type = _get_annotated_type(annotation) - if annotated_type is None: - return data - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.format is not None: - return _format_data(data, annotation.format, annotation.format_template) - - return data - - -def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: - if isinstance(data, date | datetime): - if format_ == "iso8601": - return data.isoformat() - - if format_ == "custom" and format_template is not None: - return data.strftime(format_template) - - if format_ == "base64" and is_base64_file_input(data): - binary: str | bytes | None = None - - if isinstance(data, pathlib.Path): - binary = data.read_bytes() - elif isinstance(data, io.IOBase): - binary = data.read() - - if isinstance(binary, str): # type: ignore[unreachable] - binary = binary.encode() - - if not isinstance(binary, bytes): - raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") - - return base64.b64encode(binary).decode("ascii") - - return data - - -def _transform_typeddict( - data: Mapping[str, object], - expected_type: type, -) -> Mapping[str, object]: - result: dict[str, object] = {} - annotations = get_type_hints(expected_type, include_extras=True) - for key, value in data.items(): - type_ = annotations.get(key) - if type_ is None: - # we do not have a type annotation for this field, leave it as is - result[key] = value - else: - result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_) - return result - - -async def async_maybe_transform( - data: object, - expected_type: object, -) -> Any | None: - """Wrapper over `async_transform()` that allows `None` to be passed. - - See `async_transform()` for more details. - """ - if data is None: - return None - return await async_transform(data, expected_type) - - -async def async_transform( - data: _T, - expected_type: object, -) -> _T: - """Transform dictionaries based off of type information from the given type, for example: - - ```py - class Params(TypedDict, total=False): - card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] - - - transformed = transform({"card_id": ""}, Params) - # {'cardID': ''} - ``` - - Any keys / data that does not have type information given will be included as is. - - It should be noted that the transformations that this function does are not represented in the type system. - """ - transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type)) - return cast(_T, transformed) - - -async def _async_transform_recursive( - data: object, - *, - annotation: type, - inner_type: type | None = None, -) -> object: - """Transform the given data against the expected type. - - Args: - annotation: The direct type annotation given to the particular piece of data. - This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc - - inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type - is a container type such as `List[T]`. 
In that case `inner_type` should be set to `T` so that each entry in - the list can be transformed using the metadata from the container type. - - Defaults to the same value as the `annotation` argument. - """ - if inner_type is None: - inner_type = annotation - - stripped_type = strip_annotated_type(inner_type) - if is_typeddict(stripped_type) and is_mapping(data): - return await _async_transform_typeddict(data, stripped_type) - - if ( - # List[T] - (is_list_type(stripped_type) and is_list(data)) - # Iterable[T] - or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) - ): - inner_type = extract_type_arg(stripped_type, 0) - return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] - - if is_union_type(stripped_type): - # For union types we run the transformation against all subtypes to ensure that everything is transformed. - # - # TODO: there may be edge cases where the same normalized field name will transform to two different names - # in different subtypes. - for subtype in get_args(stripped_type): - data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype) - return data - - if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) - - annotated_type = _get_annotated_type(annotation) - if annotated_type is None: - return data - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.format is not None: - return await _async_format_data(data, annotation.format, annotation.format_template) - - return data - - -async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: - if isinstance(data, date | datetime): - if format_ == "iso8601": - return data.isoformat() - - if format_ == "custom" and format_template is not None: - return data.strftime(format_template) - - if format_ == "base64" and is_base64_file_input(data): - binary: str | bytes | None = None - - if isinstance(data, pathlib.Path): - binary = await anyio.Path(data).read_bytes() - elif isinstance(data, io.IOBase): - binary = data.read() - - if isinstance(binary, str): # type: ignore[unreachable] - binary = binary.encode() - - if not isinstance(binary, bytes): - raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") - - return base64.b64encode(binary).decode("ascii") - - return data - - -async def _async_transform_typeddict( - data: Mapping[str, object], - expected_type: type, -) -> Mapping[str, object]: - result: dict[str, object] = {} - annotations = get_type_hints(expected_type, include_extras=True) - for key, value in data.items(): - type_ = annotations.get(key) - if type_ is None: - # we do not have a type annotation for this field, leave it as is - result[key] = value - else: - result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_) - return result diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_typing.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_typing.py deleted file mode 100644 index c7c54dcc37..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_typing.py +++ /dev/null @@ -1,122 +0,0 @@ -from __future__ import annotations - -from collections import abc as _c_abc -from collections.abc import Iterable -from typing import Annotated, 
Any, TypeVar, cast, get_args, get_origin - -from typing_extensions import Required - -from .._base_compat import is_union as _is_union -from .._base_type import InheritsGeneric - - -def is_annotated_type(typ: type) -> bool: - return get_origin(typ) == Annotated - - -def is_list_type(typ: type) -> bool: - return (get_origin(typ) or typ) == list - - -def is_iterable_type(typ: type) -> bool: - """If the given type is `typing.Iterable[T]`""" - origin = get_origin(typ) or typ - return origin in {Iterable, _c_abc.Iterable} - - -def is_union_type(typ: type) -> bool: - return _is_union(get_origin(typ)) - - -def is_required_type(typ: type) -> bool: - return get_origin(typ) == Required - - -def is_typevar(typ: type) -> bool: - # type ignore is required because type checkers - # think this expression will always return False - return type(typ) == TypeVar # type: ignore - - -# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]] -def strip_annotated_type(typ: type) -> type: - if is_required_type(typ) or is_annotated_type(typ): - return strip_annotated_type(cast(type, get_args(typ)[0])) - - return typ - - -def extract_type_arg(typ: type, index: int) -> type: - args = get_args(typ) - try: - return cast(type, args[index]) - except IndexError as err: - raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err - - -def extract_type_var_from_base( - typ: type, - *, - generic_bases: tuple[type, ...], - index: int, - failure_message: str | None = None, -) -> type: - """Given a type like `Foo[T]`, returns the generic type variable `T`. - - This also handles the case where a concrete subclass is given, e.g. - ```py - class MyResponse(Foo[bytes]): - ... - - extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes - ``` - - And where a generic subclass is given: - ```py - _T = TypeVar('_T') - class MyResponse(Foo[_T]): - ... - - extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes - ``` - """ - cls = cast(object, get_origin(typ) or typ) - if cls in generic_bases: - # we're given the class directly - return extract_type_arg(typ, index) - - # if a subclass is given - # --- - # this is needed as __orig_bases__ is not present in the typeshed stubs - # because it is intended to be for internal use only, however there does - # not seem to be a way to resolve generic TypeVars for inherited subclasses - # without using it. - if isinstance(cls, InheritsGeneric): - target_base_class: Any | None = None - for base in cls.__orig_bases__: - if base.__origin__ in generic_bases: - target_base_class = base - break - - if target_base_class is None: - raise RuntimeError( - "Could not find the generic base class;\n" - "This should never happen;\n" - f"Does {cls} inherit from one of {generic_bases} ?" - ) - - extracted = extract_type_arg(target_base_class, index) - if is_typevar(extracted): - # If the extracted type argument is itself a type variable - # then that means the subclass itself is generic, so we have - # to resolve the type argument from the class itself, not - # the base class. - # - # Note: if there is more than 1 type argument, the subclass could - # change the ordering of the type arguments, this is not currently - # supported. 
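The `__orig_bases__` fallback described above is the crux of `extract_type_var_from_base`: a concrete subclass records its parameterized bases, so `T` can be read back with `get_args`. The core trick in isolation (a sketch; the deleted helper layers error handling and generic-subclass resolution on top):

```python
from typing import Generic, TypeVar, get_args, get_origin

T = TypeVar("T")


class Response(Generic[T]):
    pass


class BytesResponse(Response[bytes]):
    pass


def response_type(cls: type) -> type:
    """Pull T out of a subclass of Response[T] via its original bases."""
    for base in cls.__orig_bases__:  # e.g. (Response[bytes],)
        if get_origin(base) is Response:
            return get_args(base)[0]
    raise TypeError(f"{cls!r} does not inherit from Response[T]")


assert response_type(BytesResponse) is bytes
```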
- return extract_type_arg(typ, index) - - return extracted - - raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}") diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py deleted file mode 100644 index 3a7b234ab0..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py +++ /dev/null @@ -1,409 +0,0 @@ -from __future__ import annotations - -import functools -import inspect -import os -import re -from collections.abc import Callable, Iterable, Mapping, Sequence -from pathlib import Path -from typing import ( - Any, - TypeGuard, - TypeVar, - Union, - cast, - overload, -) - -import sniffio - -from .._base_compat import parse_date as parse_date # noqa: PLC0414 -from .._base_compat import parse_datetime as parse_datetime # noqa: PLC0414 -from .._base_type import FileTypes, Headers, HeadersLike, NotGiven, NotGivenOr - - -def remove_notgiven_indict(obj): - if obj is None or (not isinstance(obj, Mapping)): - return obj - return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)} - - -_T = TypeVar("_T") -_TupleT = TypeVar("_TupleT", bound=tuple[object, ...]) -_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) -_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) -CallableT = TypeVar("CallableT", bound=Callable[..., Any]) - - -def flatten(t: Iterable[Iterable[_T]]) -> list[_T]: - return [item for sublist in t for item in sublist] - - -def extract_files( - # TODO: this needs to take Dict but variance issues..... - # create protocol type ? - query: Mapping[str, object], - *, - paths: Sequence[Sequence[str]], -) -> list[tuple[str, FileTypes]]: - """Recursively extract files from the given dictionary based on specified paths. - - A path may look like this ['foo', 'files', '', 'data']. - - Note: this mutates the given dictionary. - """ - files: list[tuple[str, FileTypes]] = [] - for path in paths: - files.extend(_extract_items(query, path, index=0, flattened_key=None)) - return files - - -def _extract_items( - obj: object, - path: Sequence[str], - *, - index: int, - flattened_key: str | None, -) -> list[tuple[str, FileTypes]]: - try: - key = path[index] - except IndexError: - if isinstance(obj, NotGiven): - # no value was provided - we can safely ignore - return [] - - # cyclical import - from .._files import assert_is_file_content - - # We have exhausted the path, return the entry we found. - assert_is_file_content(obj, key=flattened_key) - assert flattened_key is not None - return [(flattened_key, cast(FileTypes, obj))] - - index += 1 - if is_dict(obj): - try: - # We are at the last entry in the path so we must remove the field - if (len(path)) == index: - item = obj.pop(key) - else: - item = obj[key] - except KeyError: - # Key was not present in the dictionary, this is not indicative of an error - # as the given path may not point to a required field. We also do not want - # to enforce required fields as the API may differ from the spec in some cases. 
-            return []
-        if flattened_key is None:
-            flattened_key = key
-        else:
-            flattened_key += f"[{key}]"
-        return _extract_items(
-            item,
-            path,
-            index=index,
-            flattened_key=flattened_key,
-        )
-    elif is_list(obj):
-        if key != "":
-            return []
-
-        return flatten(
-            [
-                _extract_items(
-                    item,
-                    path,
-                    index=index,
-                    flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
-                )
-                for item in obj
-            ]
-        )
-
-    # Something unexpected was passed, just ignore it.
-    return []
-
-
-def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]:
-    return not isinstance(obj, NotGiven)
-
-
-# Type safe methods for narrowing types with TypeVars.
-# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown],
-# however this causes Pyright to rightfully report errors. As we know we don't
-# care about the contained types we can safely use `object` in its place.
-#
-# There are two separate functions defined, `is_*` and `is_*_t` for different use cases.
-# `is_*` is for when you're dealing with an unknown input
-# `is_*_t` is for when you're narrowing a known union type to a specific subset
-
-
-def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]:
-    return isinstance(obj, tuple)
-
-
-def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]:
-    return isinstance(obj, tuple)
-
-
-def is_sequence(obj: object) -> TypeGuard[Sequence[object]]:
-    return isinstance(obj, Sequence)
-
-
-def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]:
-    return isinstance(obj, Sequence)
-
-
-def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]:
-    return isinstance(obj, Mapping)
-
-
-def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]:
-    return isinstance(obj, Mapping)
-
-
-def is_dict(obj: object) -> TypeGuard[dict[object, object]]:
-    return isinstance(obj, dict)
-
-
-def is_list(obj: object) -> TypeGuard[list[object]]:
-    return isinstance(obj, list)
-
-
-def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
-    return isinstance(obj, Iterable)
-
-
-def deepcopy_minimal(item: _T) -> _T:
-    """Minimal reimplementation of copy.deepcopy() that will only copy certain object types:
-
-    - mappings, e.g. `dict`
-    - list
-
-    This is done for performance reasons.
-    """
-    if is_mapping(item):
-        return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()})
-    if is_list(item):
-        return cast(_T, [deepcopy_minimal(entry) for entry in item])
-    return item
-
-
-# copied from https://github.com/Rapptz/RoboDanny
-def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
-    size = len(seq)
-    if size == 0:
-        return ""
-
-    if size == 1:
-        return seq[0]
-
-    if size == 2:
-        return f"{seq[0]} {final} {seq[1]}"
-
-    return delim.join(seq[:-1]) + f" {final} {seq[-1]}"
-
-
-def quote(string: str) -> str:
-    """Add single quotation marks around the given string. Does *not* do any escaping."""
-    return f"'{string}'"
-
-
-def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]:
-    """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function.
-
-    Useful for enforcing runtime validation of overloaded functions.
-
-    Example usage:
-    ```py
-    @overload
-    def foo(*, a: str) -> str:
-        ...
-
-
-    @overload
-    def foo(*, b: bool) -> str:
-        ...
-
-
-    # This enforces the same constraints that a static type checker would
-    # i.e. that either a or b must be passed to the function
-    @required_args(["a"], ["b"])
-    def foo(*, a: str | None = None, b: bool | None = None) -> str:
-        ...
-    ```
-    """
-
-    def inner(func: CallableT) -> CallableT:
-        params = inspect.signature(func).parameters
-        positional = [
-            name
-            for name, param in params.items()
-            if param.kind
-            in {
-                param.POSITIONAL_ONLY,
-                param.POSITIONAL_OR_KEYWORD,
-            }
-        ]
-
-        @functools.wraps(func)
-        def wrapper(*args: object, **kwargs: object) -> object:
-            given_params: set[str] = set()
-            for i in range(len(args)):
-                try:
-                    given_params.add(positional[i])
-                except IndexError:
-                    raise TypeError(
-                        f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given"
-                    ) from None
-
-            given_params.update(kwargs.keys())
-
-            for variant in variants:
-                matches = all(param in given_params for param in variant)
-                if matches:
-                    break
-            else:  # no break
-                if len(variants) > 1:
-                    variations = human_join(
-                        ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants]
-                    )
-                    msg = f"Missing required arguments; Expected either {variations} arguments to be given"
-                else:
-                    # TODO: this error message is not deterministic
-                    missing = list(set(variants[0]) - given_params)
-                    if len(missing) > 1:
-                        msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}"
-                    else:
-                        msg = f"Missing required argument: {quote(missing[0])}"
-                raise TypeError(msg)
-            return func(*args, **kwargs)
-
-        return wrapper  # type: ignore
-
-    return inner
-
-
-_K = TypeVar("_K")
-_V = TypeVar("_V")
-
-
-@overload
-def strip_not_given(obj: None) -> None: ...
-
-
-@overload
-def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ...
-
-
-@overload
-def strip_not_given(obj: object) -> object: ...
-
-
-def strip_not_given(obj: object | None) -> object:
-    """Remove all top-level keys where their values are instances of `NotGiven`"""
-    if obj is None:
-        return None
-
-    if not is_mapping(obj):
-        return obj
-
-    return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)}
-
-
-def coerce_integer(val: str) -> int:
-    return int(val, base=10)
-
-
-def coerce_float(val: str) -> float:
-    return float(val)
-
-
-def coerce_boolean(val: str) -> bool:
-    return val in {"true", "1", "on"}
-
-
-def maybe_coerce_integer(val: str | None) -> int | None:
-    if val is None:
-        return None
-    return coerce_integer(val)
-
-
-def maybe_coerce_float(val: str | None) -> float | None:
-    if val is None:
-        return None
-    return coerce_float(val)
-
-
-def maybe_coerce_boolean(val: str | None) -> bool | None:
-    if val is None:
-        return None
-    return coerce_boolean(val)
-
-
-def removeprefix(string: str, prefix: str) -> str:
-    """Remove a prefix from a string.
-
-    Backport of `str.removeprefix` for Python < 3.9
-    """
-    if string.startswith(prefix):
-        return string[len(prefix) :]
-    return string
-
-
-def removesuffix(string: str, suffix: str) -> str:
-    """Remove a suffix from a string.
-
-    Backport of `str.removesuffix` for Python < 3.9
-    """
-    if string.endswith(suffix):
-        return string[: -len(suffix)]
-    return string
-
-
-def file_from_path(path: str) -> FileTypes:
-    contents = Path(path).read_bytes()
-    file_name = os.path.basename(path)
-    return (file_name, contents)
-
-
-def get_required_header(headers: HeadersLike, header: str) -> str:
-    lower_header = header.lower()
-    if isinstance(headers, Mapping):
-        headers = cast(Headers, headers)
-        for k, v in headers.items():
-            if k.lower() == lower_header and isinstance(v, str):
-                return v
-
-    """ to deal with the case where the header looks like Stainless-Event-Id """
-    intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize())
-
-    for normalized_header in [header, lower_header, header.upper(), intercaps_header]:
-        value = headers.get(normalized_header)
-        if value:
-            return value
-
-    raise ValueError(f"Could not find {header} header")
-
-
-def get_async_library() -> str:
-    try:
-        return sniffio.current_async_library()
-    except Exception:
-        return "false"
-
-
-def drop_prefix_image_data(content: Union[str, list[dict]]) -> Union[str, list[dict]]:
-    """
-    Remove the `;base64,` prefix from image data URLs.
-    :param content: message content that may contain image_url entries
-    :return: the content with any base64 data-URL prefixes removed
-    """
-    if isinstance(content, list):
-        for data in content:
-            if data.get("type") == "image_url":
-                image_data = data.get("image_url").get("url")
-                if image_data.startswith("data:image/"):
-                    image_data = image_data.split("base64,")[-1]
-                    data["image_url"]["url"] = image_data
-
-    return content
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/logs.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/logs.py
deleted file mode 100644
index e5fce94c00..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/logs.py
+++ /dev/null
@@ -1,78 +0,0 @@
-import logging
-import os
-import time
-
-logger = logging.getLogger(__name__)
-
-
-class LoggerNameFilter(logging.Filter):
-    def filter(self, record):
-        # return record.name.startswith("loom_core") or record.name in "ERROR" or (
-        #     record.name.startswith("uvicorn.error")
-        #     and record.getMessage().startswith("Uvicorn running on")
-        # )
-        return True
-
-
-def get_log_file(log_path: str, sub_dir: str):
-    """
-    sub_dir should contain a timestamp.
-    """
-    log_dir = os.path.join(log_path, sub_dir)
-    # A new directory should be created on each call, hence `exist_ok=False`
-    os.makedirs(log_dir, exist_ok=False)
-    return os.path.join(log_dir, "zhipuai.log")
-
-
-def get_config_dict(log_level: str, log_file_path: str, log_backup_count: int, log_max_bytes: int) -> dict:
-    # for windows, the path should be a raw string.
-    log_file_path = log_file_path.encode("unicode-escape").decode() if os.name == "nt" else log_file_path
-    log_level = log_level.upper()
-    config_dict = {
-        "version": 1,
-        "disable_existing_loggers": False,
-        "formatters": {
-            "formatter": {"format": ("%(asctime)s %(name)-12s %(process)d %(levelname)-8s %(message)s")},
-        },
-        "filters": {
-            "logger_name_filter": {
-                "()": __name__ + ".LoggerNameFilter",
-            },
-        },
-        "handlers": {
-            "stream_handler": {
-                "class": "logging.StreamHandler",
-                "formatter": "formatter",
-                "level": log_level,
-                # "stream": "ext://sys.stdout",
-                # "filters": ["logger_name_filter"],
-            },
-            "file_handler": {
-                "class": "logging.handlers.RotatingFileHandler",
-                "formatter": "formatter",
-                "level": log_level,
-                "filename": log_file_path,
-                "mode": "a",
-                "maxBytes": log_max_bytes,
-                "backupCount": log_backup_count,
-                "encoding": "utf8",
-            },
-        },
-        "loggers": {
-            "loom_core": {
-                "handlers": ["stream_handler", "file_handler"],
-                "level": log_level,
-                "propagate": False,
-            }
-        },
-        "root": {
-            "level": log_level,
-            "handlers": ["stream_handler", "file_handler"],
-        },
-    }
-    return config_dict
-
-
-def get_timestamp_ms():
-    t = time.time()
-    return int(round(t * 1000))
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/pagination.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/pagination.py
deleted file mode 100644
index 7f0b1b91d9..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/pagination.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Any, Generic, Optional, TypeVar, cast
-
-from typing_extensions import Protocol, override, runtime_checkable
-
-from ._http_client import BasePage, BaseSyncPage, PageInfo
-
-__all__ = ["SyncPage", "SyncCursorPage"]
-
-_T = TypeVar("_T")
-
-
-@runtime_checkable
-class CursorPageItem(Protocol):
-    id: Optional[str]
-
-
-class SyncPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
-    """Note: no pagination actually occurs yet, this is for forwards-compatibility."""
-
-    data: list[_T]
-    object: str
-
-    @override
-    def _get_page_items(self) -> list[_T]:
-        data = self.data
-        if not data:
-            return []
-        return data
-
-    @override
-    def next_page_info(self) -> None:
-        """
-        This page represents a response that isn't actually paginated at the API level
-        so there will never be a next page.
- """ - return None - - -class SyncCursorPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]): - data: list[_T] - - @override - def _get_page_items(self) -> list[_T]: - data = self.data - if not data: - return [] - return data - - @override - def next_page_info(self) -> Optional[PageInfo]: - data = self.data - if not data: - return None - - item = cast(Any, data[-1]) - if not isinstance(item, CursorPageItem) or item.id is None: - # TODO emit warning log - return None - - return PageInfo(params={"after": item.id}) diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/__init__.py deleted file mode 100644 index 9f941fb91c..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .assistant_completion import AssistantCompletion - -__all__ = [ - "AssistantCompletion", -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_completion.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_completion.py deleted file mode 100644 index cbfb6edaeb..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_completion.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import Any, Optional - -from ...core import BaseModel -from .message import MessageContent - -__all__ = ["AssistantCompletion", "CompletionUsage"] - - -class ErrorInfo(BaseModel): - code: str # 错误码 - message: str # 错误信息 - - -class AssistantChoice(BaseModel): - index: int # 结果下标 - delta: MessageContent # 当前会话输出消息体 - finish_reason: str - """ - # 推理结束原因 stop代表推理自然结束或触发停止词。 sensitive 代表模型推理内容被安全审核接口拦截。请注意,针对此类内容,请用户自行判断并决定是否撤回已公开的内容。 - # network_error 代表模型推理服务异常。 - """ # noqa: E501 - metadata: dict # 元信息,拓展字段 - - -class CompletionUsage(BaseModel): - prompt_tokens: int # 输入的 tokens 数量 - completion_tokens: int # 输出的 tokens 数量 - total_tokens: int # 总 tokens 数量 - - -class AssistantCompletion(BaseModel): - id: str # 请求 ID - conversation_id: str # 会话 ID - assistant_id: str # 智能体 ID - created: int # 请求创建时间,Unix 时间戳 - status: str # 返回状态,包括:`completed` 表示生成结束`in_progress`表示生成中 `failed` 表示生成异常 - last_error: Optional[ErrorInfo] # 异常信息 - choices: list[AssistantChoice] # 增量返回的信息 - metadata: Optional[dict[str, Any]] # 元信息,拓展字段 - usage: Optional[CompletionUsage] # tokens 数量统计 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_params.py deleted file mode 100644 index 03f14f4238..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_params.py +++ /dev/null @@ -1,7 +0,0 @@ -from typing import TypedDict - - -class ConversationParameters(TypedDict, total=False): - assistant_id: str # 智能体 ID - page: int # 当前分页 - page_size: int # 分页数量 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_resp.py deleted file mode 100644 index d1833d220a..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_conversation_resp.py +++ /dev/null @@ -1,29 +0,0 @@ -from ...core import BaseModel - -__all__ = 
["ConversationUsageListResp"] - - -class Usage(BaseModel): - prompt_tokens: int # 用户输入的 tokens 数量 - completion_tokens: int # 模型输入的 tokens 数量 - total_tokens: int # 总 tokens 数量 - - -class ConversationUsage(BaseModel): - id: str # 会话 id - assistant_id: str # 智能体Assistant id - create_time: int # 创建时间 - update_time: int # 更新时间 - usage: Usage # 会话中 tokens 数量统计 - - -class ConversationUsageList(BaseModel): - assistant_id: str # 智能体id - has_more: bool # 是否还有更多页 - conversation_list: list[ConversationUsage] # 返回的 - - -class ConversationUsageListResp(BaseModel): - code: int - msg: str - data: ConversationUsageList diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_create_params.py deleted file mode 100644 index 2def1025cd..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_create_params.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Optional, TypedDict, Union - - -class AssistantAttachments: - file_id: str - - -class MessageTextContent: - type: str # 目前支持 type = text - text: str - - -MessageContent = Union[MessageTextContent] - - -class ConversationMessage(TypedDict): - """会话消息体""" - - role: str # 用户的输入角色,例如 'user' - content: list[MessageContent] # 会话消息体的内容 - - -class AssistantParameters(TypedDict, total=False): - """智能体参数类""" - - assistant_id: str # 智能体 ID - conversation_id: Optional[str] # 会话 ID,不传则创建新会话 - model: str # 模型名称,默认为 'GLM-4-Assistant' - stream: bool # 是否支持流式 SSE,需要传入 True - messages: list[ConversationMessage] # 会话消息体 - attachments: Optional[list[AssistantAttachments]] # 会话指定的文件,非必填 - metadata: Optional[dict] # 元信息,拓展字段,非必填 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_support_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_support_resp.py deleted file mode 100644 index 0709cdbcad..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/assistant_support_resp.py +++ /dev/null @@ -1,21 +0,0 @@ -from ...core import BaseModel - -__all__ = ["AssistantSupportResp"] - - -class AssistantSupport(BaseModel): - assistant_id: str # 智能体的 Assistant id,用于智能体会话 - created_at: int # 创建时间 - updated_at: int # 更新时间 - name: str # 智能体名称 - avatar: str # 智能体头像 - description: str # 智能体描述 - status: str # 智能体状态,目前只有 publish - tools: list[str] # 智能体支持的工具名 - starter_prompts: list[str] # 智能体启动推荐的 prompt - - -class AssistantSupportResp(BaseModel): - code: int - msg: str - data: list[AssistantSupport] # 智能体列表 diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/__init__.py deleted file mode 100644 index 562e0151e5..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .message_content import MessageContent - -__all__ = ["MessageContent"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/message_content.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/message_content.py deleted file mode 100644 index 6a1a438a6f..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/message_content.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing 
import Annotated, TypeAlias, Union - -from ....core._utils import PropertyInfo -from .text_content_block import TextContentBlock -from .tools_delta_block import ToolsDeltaBlock - -__all__ = ["MessageContent"] - - -MessageContent: TypeAlias = Annotated[ - Union[ToolsDeltaBlock, TextContentBlock], - PropertyInfo(discriminator="type"), -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/text_content_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/text_content_block.py deleted file mode 100644 index 865fb1139e..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/text_content_block.py +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Literal - -from ....core import BaseModel - -__all__ = ["TextContentBlock"] - - -class TextContentBlock(BaseModel): - content: str - - role: str = "assistant" - - type: Literal["content"] = "content" - """Always `content`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/code_interpreter_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/code_interpreter_delta_block.py deleted file mode 100644 index 9d569b282e..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/code_interpreter_delta_block.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Literal - -__all__ = ["CodeInterpreterToolBlock"] - -from .....core import BaseModel - - -class CodeInterpreterToolOutput(BaseModel): - """代码工具输出结果""" - - type: str # 代码执行日志,目前只有 logs - logs: str # 代码执行的日志结果 - error_msg: str # 错误信息 - - -class CodeInterpreter(BaseModel): - """代码解释器""" - - input: str # 生成的代码片段,输入给代码沙盒 - outputs: list[CodeInterpreterToolOutput] # 代码执行后的输出结果 - - -class CodeInterpreterToolBlock(BaseModel): - """代码工具块""" - - code_interpreter: CodeInterpreter # 代码解释器对象 - type: Literal["code_interpreter"] # 调用工具的类型,始终为 `code_interpreter` diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/drawing_tool_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/drawing_tool_delta_block.py deleted file mode 100644 index 0b6895556b..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/drawing_tool_delta_block.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Literal - -from .....core import BaseModel - -__all__ = ["DrawingToolBlock"] - - -class DrawingToolOutput(BaseModel): - image: str - - -class DrawingTool(BaseModel): - input: str - outputs: list[DrawingToolOutput] - - -class DrawingToolBlock(BaseModel): - drawing_tool: DrawingTool - - type: Literal["drawing_tool"] - """Always `drawing_tool`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/function_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/function_delta_block.py deleted file mode 100644 index c439bc4b3f..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/function_delta_block.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Literal, Union - -__all__ = ["FunctionToolBlock"] - -from .....core import BaseModel - - -class FunctionToolOutput(BaseModel): - content: str - - -class FunctionTool(BaseModel): - name: str - arguments: Union[str, 
dict] - outputs: list[FunctionToolOutput] - - -class FunctionToolBlock(BaseModel): - function: FunctionTool - - type: Literal["function"] - """Always `drawing_tool`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/retrieval_delta_black.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/retrieval_delta_black.py deleted file mode 100644 index 4789e9378a..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/retrieval_delta_black.py +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Literal - -from .....core import BaseModel - - -class RetrievalToolOutput(BaseModel): - """ - This class represents the output of a retrieval tool. - - Attributes: - - text (str): The text snippet retrieved from the knowledge base. - - document (str): The name of the document from which the text snippet was retrieved, returned only in intelligent configuration. - """ # noqa: E501 - - text: str - document: str - - -class RetrievalTool(BaseModel): - """ - This class represents the outputs of a retrieval tool. - - Attributes: - - outputs (List[RetrievalToolOutput]): A list of text snippets and their respective document names retrieved from the knowledge base. - """ # noqa: E501 - - outputs: list[RetrievalToolOutput] - - -class RetrievalToolBlock(BaseModel): - """ - This class represents a block for invoking the retrieval tool. - - Attributes: - - retrieval (RetrievalTool): An instance of the RetrievalTool class containing the retrieval outputs. - - type (Literal["retrieval"]): The type of tool being used, always set to "retrieval". - """ - - retrieval: RetrievalTool - type: Literal["retrieval"] - """Always `retrieval`.""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/tools_type.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/tools_type.py deleted file mode 100644 index 98544053d4..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/tools_type.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Annotated, TypeAlias, Union - -from .....core._utils import PropertyInfo -from .code_interpreter_delta_block import CodeInterpreterToolBlock -from .drawing_tool_delta_block import DrawingToolBlock -from .function_delta_block import FunctionToolBlock -from .retrieval_delta_black import RetrievalToolBlock -from .web_browser_delta_block import WebBrowserToolBlock - -__all__ = ["ToolsType"] - - -ToolsType: TypeAlias = Annotated[ - Union[DrawingToolBlock, CodeInterpreterToolBlock, WebBrowserToolBlock, RetrievalToolBlock, FunctionToolBlock], - PropertyInfo(discriminator="type"), -] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/web_browser_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/web_browser_delta_block.py deleted file mode 100644 index 966e6fe0c8..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools/web_browser_delta_block.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Literal - -from .....core import BaseModel - -__all__ = ["WebBrowserToolBlock"] - - -class WebBrowserOutput(BaseModel): - """ - This class represents the output of a web browser search result. - - Attributes: - - title (str): The title of the search result. 
-    - link (str): The URL link to the search result's webpage.
-    - content (str): The textual content extracted from the search result.
-    - error_msg (str): Any error message encountered during the search or retrieval process.
-    """
-
-    title: str
-    link: str
-    content: str
-    error_msg: str
-
-
-class WebBrowser(BaseModel):
-    """
-    This class represents the input and outputs of a web browser search.
-
-    Attributes:
-    - input (str): The input query for the web browser search.
-    - outputs (List[WebBrowserOutput]): A list of search results returned by the web browser.
-    """
-
-    input: str
-    outputs: list[WebBrowserOutput]
-
-
-class WebBrowserToolBlock(BaseModel):
-    """
-    This class represents a block for invoking the web browser tool.
-
-    Attributes:
-    - web_browser (WebBrowser): An instance of the WebBrowser class containing the search input and outputs.
-    - type (Literal["web_browser"]): The type of tool being used, always set to "web_browser".
-    """
-
-    web_browser: WebBrowser
-    type: Literal["web_browser"]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools_delta_block.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools_delta_block.py
deleted file mode 100644
index 781a1ab819..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/assistant/message/tools_delta_block.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from typing import Literal
-
-from ....core import BaseModel
-from .tools.tools_type import ToolsType
-
-__all__ = ["ToolsDeltaBlock"]
-
-
-class ToolsDeltaBlock(BaseModel):
-    tool_calls: list[ToolsType]
-    """The tool-call deltas contained in this message chunk."""
-
-    role: str = "tool"
-
-    type: Literal["tool_calls"] = "tool_calls"
-    """Always `tool_calls`."""
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch.py
deleted file mode 100644
index 560562915c..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-import builtins
-from typing import Literal, Optional
-
-from ..core import BaseModel
-from .batch_error import BatchError
-from .batch_request_counts import BatchRequestCounts
-
-__all__ = ["Batch", "Errors"]
-
-
-class Errors(BaseModel):
-    data: Optional[list[BatchError]] = None
-
-    object: Optional[str] = None
-    """The object type, which is always `list`."""
-
-
-class Batch(BaseModel):
-    id: str
-
-    completion_window: str
-    """Address information used for executing the requests."""
-
-    created_at: int
-    """The Unix timestamp (in seconds) for when the batch was created."""
-
-    endpoint: str
-    """The address of the ZhipuAI endpoint."""
-
-    input_file_id: str
-    """The ID of the input file marked for the batch."""
-
-    object: Literal["batch"]
-    """The object type, which is always `batch`."""
-
-    status: Literal[
-        "validating", "failed", "in_progress", "finalizing", "completed", "expired", "cancelling", "cancelled"
-    ]
-    """The status of the batch."""
-
-    cancelled_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when the batch was cancelled."""
-
-    cancelling_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when cancellation was requested."""
-
-    completed_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when the batch was completed."""
-
-    error_file_id: Optional[str] = None
-    """The ID of the file containing the outputs of requests that failed."""
-
-    errors: Optional[Errors] = None
-
-    expired_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when the batch expired."""
-
-    expires_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when the batch will expire."""
-
-    failed_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when the batch failed."""
-
-    finalizing_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when the batch started finalizing."""
-
-    in_progress_at: Optional[int] = None
-    """The Unix timestamp (in seconds) for when the batch started processing."""
-
-    metadata: Optional[builtins.object] = None
-    """
-    Metadata in key:value form for storing additional information in a structured
-    format. Keys may be up to 64 characters long and values up to 512 characters long.
-    """
-
-    output_file_id: Optional[str] = None
-    """The ID of the output file of completed requests."""
-
-    request_counts: Optional[BatchRequestCounts] = None
-    """Request counts for the different statuses within the batch."""
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_create_params.py
deleted file mode 100644
index 3dae65ea46..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_create_params.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from __future__ import annotations
-
-from typing import Literal, Optional
-
-from typing_extensions import Required, TypedDict
-
-__all__ = ["BatchCreateParams"]
-
-
-class BatchCreateParams(TypedDict, total=False):
-    completion_window: Required[str]
-    """The time frame within which the batch should be processed.
-
-    Currently only `24h` is supported.
-    """
-
-    endpoint: Required[Literal["/v1/chat/completions", "/v1/embeddings"]]
-    """The endpoint to be used for all requests in the batch.
-
-    Currently `/v1/chat/completions` and `/v1/embeddings` are supported.
-    """
-
-    input_file_id: Required[str]
-    """The ID of an uploaded file that contains requests for the new batch.
-
-    See [upload file](https://platform.openai.com/docs/api-reference/files/create)
-    for how to upload a file.
-
-    Your input file must be formatted as a
-    [JSONL file](https://platform.openai.com/docs/api-reference/batch/requestInput),
-    and must be uploaded with the purpose `batch`.
- """ - - metadata: Optional[dict[str, str]] - """Optional custom metadata for the batch.""" - - auto_delete_input_file: Optional[bool] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_error.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_error.py deleted file mode 100644 index f934db1978..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_error.py +++ /dev/null @@ -1,21 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Optional - -from ..core import BaseModel - -__all__ = ["BatchError"] - - -class BatchError(BaseModel): - code: Optional[str] = None - """定义的业务错误码""" - - line: Optional[int] = None - """文件中的行号""" - - message: Optional[str] = None - """关于对话文件中的错误的描述""" - - param: Optional[str] = None - """参数名称,如果有的话""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_list_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_list_params.py deleted file mode 100644 index 1a68167132..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_list_params.py +++ /dev/null @@ -1,20 +0,0 @@ -from __future__ import annotations - -from typing_extensions import TypedDict - -__all__ = ["BatchListParams"] - - -class BatchListParams(TypedDict, total=False): - after: str - """分页的游标,用于获取下一页的数据。 - - `after` 是一个指向当前页面的游标,用于获取下一页的数据。如果没有提供 `after`,则返回第一页的数据。 - list. - """ - - limit: int - """这个参数用于限制返回的结果数量。 - - Limit 用于限制返回的结果数量。默认值为 10 - """ diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_request_counts.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_request_counts.py deleted file mode 100644 index ca3ccae625..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/batch_request_counts.py +++ /dev/null @@ -1,14 +0,0 @@ -from ..core import BaseModel - -__all__ = ["BatchRequestCounts"] - - -class BatchRequestCounts(BaseModel): - completed: int - """这个数字表示已经完成的请求。""" - - failed: int - """这个数字表示失败的请求。""" - - total: int - """这个数字表示总的请求。""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/async_chat_completion.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/async_chat_completion.py deleted file mode 100644 index c1eed070f3..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/async_chat_completion.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Optional - -from ...core import BaseModel -from .chat_completion import CompletionChoice, CompletionUsage - -__all__ = ["AsyncTaskStatus", "AsyncCompletion"] - - -class AsyncTaskStatus(BaseModel): - id: Optional[str] = None - request_id: Optional[str] = None - model: Optional[str] = None - task_status: Optional[str] = None - - -class AsyncCompletion(BaseModel): - id: Optional[str] = None - request_id: Optional[str] = None - model: Optional[str] = None - task_status: str - choices: list[CompletionChoice] - usage: CompletionUsage diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion.py deleted file mode 100644 index 1945a826cd..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion.py +++ /dev/null @@ -1,43 +0,0 @@ -from typing import Optional - -from 
...core import BaseModel - -__all__ = ["Completion", "CompletionUsage"] - - -class Function(BaseModel): - arguments: str - name: str - - -class CompletionMessageToolCall(BaseModel): - id: str - function: Function - type: str - - -class CompletionMessage(BaseModel): - content: Optional[str] = None - role: str - tool_calls: Optional[list[CompletionMessageToolCall]] = None - - -class CompletionUsage(BaseModel): - prompt_tokens: int - completion_tokens: int - total_tokens: int - - -class CompletionChoice(BaseModel): - index: int - finish_reason: str - message: CompletionMessage - - -class Completion(BaseModel): - model: Optional[str] = None - created: Optional[int] = None - choices: list[CompletionChoice] - request_id: Optional[str] = None - id: Optional[str] = None - usage: CompletionUsage diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion_chunk.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion_chunk.py deleted file mode 100644 index 27fad0008a..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completion_chunk.py +++ /dev/null @@ -1,57 +0,0 @@ -from typing import Any, Optional - -from ...core import BaseModel - -__all__ = [ - "CompletionUsage", - "ChatCompletionChunk", - "Choice", - "ChoiceDelta", - "ChoiceDeltaFunctionCall", - "ChoiceDeltaToolCall", - "ChoiceDeltaToolCallFunction", -] - - -class ChoiceDeltaFunctionCall(BaseModel): - arguments: Optional[str] = None - name: Optional[str] = None - - -class ChoiceDeltaToolCallFunction(BaseModel): - arguments: Optional[str] = None - name: Optional[str] = None - - -class ChoiceDeltaToolCall(BaseModel): - index: int - id: Optional[str] = None - function: Optional[ChoiceDeltaToolCallFunction] = None - type: Optional[str] = None - - -class ChoiceDelta(BaseModel): - content: Optional[str] = None - role: Optional[str] = None - tool_calls: Optional[list[ChoiceDeltaToolCall]] = None - - -class Choice(BaseModel): - delta: ChoiceDelta - finish_reason: Optional[str] = None - index: int - - -class CompletionUsage(BaseModel): - prompt_tokens: int - completion_tokens: int - total_tokens: int - - -class ChatCompletionChunk(BaseModel): - id: Optional[str] = None - choices: list[Choice] - created: Optional[int] = None - model: Optional[str] = None - usage: Optional[CompletionUsage] = None - extra_json: dict[str, Any] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completions_create_param.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completions_create_param.py deleted file mode 100644 index 6ee4dc4794..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/chat_completions_create_param.py +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Optional - -from typing_extensions import TypedDict - - -class Reference(TypedDict, total=False): - enable: Optional[bool] - search_query: Optional[str] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/code_geex/code_geex_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/code_geex/code_geex_params.py deleted file mode 100644 index 666b38855c..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/code_geex/code_geex_params.py +++ /dev/null @@ -1,146 +0,0 @@ -from typing import Literal, Optional - -from typing_extensions import Required, TypedDict - -__all__ = [ - "CodeGeexTarget", - 
"CodeGeexContext", - "CodeGeexExtra", -] - - -class CodeGeexTarget(TypedDict, total=False): - """补全的内容参数""" - - path: Optional[str] - """文件路径""" - language: Required[ - Literal[ - "c", - "c++", - "cpp", - "c#", - "csharp", - "c-sharp", - "css", - "cuda", - "dart", - "lua", - "objectivec", - "objective-c", - "objective-c++", - "python", - "perl", - "prolog", - "swift", - "lisp", - "java", - "scala", - "tex", - "jsx", - "tsx", - "vue", - "markdown", - "html", - "php", - "js", - "javascript", - "typescript", - "go", - "shell", - "rust", - "sql", - "kotlin", - "vb", - "ruby", - "pascal", - "r", - "fortran", - "lean", - "matlab", - "delphi", - "scheme", - "basic", - "assembly", - "groovy", - "abap", - "gdscript", - "haskell", - "julia", - "elixir", - "excel", - "clojure", - "actionscript", - "solidity", - "powershell", - "erlang", - "cobol", - "alloy", - "awk", - "thrift", - "sparql", - "augeas", - "cmake", - "f-sharp", - "stan", - "isabelle", - "dockerfile", - "rmarkdown", - "literate-agda", - "tcl", - "glsl", - "antlr", - "verilog", - "racket", - "standard-ml", - "elm", - "yaml", - "smalltalk", - "ocaml", - "idris", - "visual-basic", - "protocol-buffer", - "bluespec", - "applescript", - "makefile", - "tcsh", - "maple", - "systemverilog", - "literate-coffeescript", - "vhdl", - "restructuredtext", - "sas", - "literate-haskell", - "java-server-pages", - "coffeescript", - "emacs-lisp", - "mathematica", - "xslt", - "zig", - "common-lisp", - "stata", - "agda", - "ada", - ] - ] - """代码语言类型,如python""" - code_prefix: Required[str] - """补全位置的前文""" - code_suffix: Required[str] - """补全位置的后文""" - - -class CodeGeexContext(TypedDict, total=False): - """附加代码""" - - path: Required[str] - """附加代码文件的路径""" - code: Required[str] - """附加的代码内容""" - - -class CodeGeexExtra(TypedDict, total=False): - target: Required[CodeGeexTarget] - """补全的内容参数""" - contexts: Optional[list[CodeGeexContext]] - """附加代码""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/embeddings.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/embeddings.py deleted file mode 100644 index 8425b5c866..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/embeddings.py +++ /dev/null @@ -1,21 +0,0 @@ -from __future__ import annotations - -from typing import Optional - -from ..core import BaseModel -from .chat.chat_completion import CompletionUsage - -__all__ = ["Embedding", "EmbeddingsResponded"] - - -class Embedding(BaseModel): - object: str - index: Optional[int] = None - embedding: list[float] - - -class EmbeddingsResponded(BaseModel): - object: str - data: list[Embedding] - model: str - usage: CompletionUsage diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/__init__.py deleted file mode 100644 index bbaf59e4d7..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .file_deleted import FileDeleted -from .file_object import FileObject, ListOfFileObject -from .upload_detail import UploadDetail - -__all__ = ["FileObject", "ListOfFileObject", "UploadDetail", "FileDeleted"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_create_params.py deleted file mode 100644 index 4ef93b1c05..0000000000 --- 
a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_create_params.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -from typing import Literal, Optional - -from typing_extensions import Required, TypedDict - -__all__ = ["FileCreateParams"] - -from ...core import FileTypes -from . import UploadDetail - - -class FileCreateParams(TypedDict, total=False): - file: FileTypes - """file和 upload_detail二选一必填""" - - upload_detail: list[UploadDetail] - """file和 upload_detail二选一必填""" - - purpose: Required[Literal["fine-tune", "retrieval", "batch"]] - """ - 上传文件的用途,支持 "fine-tune和 "retrieval" - retrieval支持上传Doc、Docx、PDF、Xlsx、URL类型文件,且单个文件的大小不超过 5MB。 - fine-tune支持上传.jsonl文件且当前单个文件的大小最大可为 100 MB ,文件中语料格式需满足微调指南中所描述的格式。 - """ - custom_separator: Optional[list[str]] - """ - 当 purpose 为 retrieval 且文件类型为 pdf, url, docx 时上传,切片规则默认为 `\n`。 - """ - knowledge_id: str - """ - 当文件上传目的为 retrieval 时,需要指定知识库ID进行上传。 - """ - - sentence_size: int - """ - 当文件上传目的为 retrieval 时,需要指定知识库ID进行上传。 - """ diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_deleted.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_deleted.py deleted file mode 100644 index a384b1a69a..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_deleted.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Literal - -from ...core import BaseModel - -__all__ = ["FileDeleted"] - - -class FileDeleted(BaseModel): - id: str - - deleted: bool - - object: Literal["file"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_object.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_object.py deleted file mode 100644 index 8f9d0fbb8e..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/file_object.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = ["FileObject", "ListOfFileObject"] - - -class FileObject(BaseModel): - id: Optional[str] = None - bytes: Optional[int] = None - created_at: Optional[int] = None - filename: Optional[str] = None - object: Optional[str] = None - purpose: Optional[str] = None - status: Optional[str] = None - status_details: Optional[str] = None - - -class ListOfFileObject(BaseModel): - object: Optional[str] = None - data: list[FileObject] - has_more: Optional[bool] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/upload_detail.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/upload_detail.py deleted file mode 100644 index 8f1ca5ce57..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/files/upload_detail.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - - -class UploadDetail(BaseModel): - url: str - knowledge_type: int - file_name: Optional[str] = None - sentence_size: Optional[int] = None - custom_separator: Optional[list[str]] = None - callback_url: Optional[str] = None - callback_header: Optional[dict[str, str]] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/__init__.py deleted file mode 100644 index 416f516ef7..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ 
-from __future__ import annotations
-
-from .fine_tuning_job import FineTuningJob, ListOfFineTuningJob
-from .fine_tuning_job_event import FineTuningJobEvent
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job.py
deleted file mode 100644
index 75c7553dbe..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from typing import Optional, Union
-
-from ...core import BaseModel
-
-__all__ = ["FineTuningJob", "Error", "Hyperparameters", "ListOfFineTuningJob"]
-
-
-class Error(BaseModel):
-    code: str
-    message: str
-    param: Optional[str] = None
-
-
-class Hyperparameters(BaseModel):
-    n_epochs: Union[str, int, None] = None
-
-
-class FineTuningJob(BaseModel):
-    id: Optional[str] = None
-
-    request_id: Optional[str] = None
-
-    created_at: Optional[int] = None
-
-    error: Optional[Error] = None
-
-    fine_tuned_model: Optional[str] = None
-
-    finished_at: Optional[int] = None
-
-    hyperparameters: Optional[Hyperparameters] = None
-
-    model: Optional[str] = None
-
-    object: Optional[str] = None
-
-    result_files: list[str]
-
-    status: str
-
-    trained_tokens: Optional[int] = None
-
-    training_file: str
-
-    validation_file: Optional[str] = None
-
-
-class ListOfFineTuningJob(BaseModel):
-    object: Optional[str] = None
-    data: list[FineTuningJob]
-    has_more: Optional[bool] = None
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job_event.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job_event.py
deleted file mode 100644
index f996cff114..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/fine_tuning_job_event.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from typing import Optional, Union
-
-from ...core import BaseModel
-
-__all__ = ["FineTuningJobEvent", "Metric", "JobEvent"]
-
-
-class Metric(BaseModel):
-    epoch: Optional[Union[str, int, float]] = None
-    current_steps: Optional[int] = None
-    total_steps: Optional[int] = None
-    elapsed_time: Optional[str] = None
-    remaining_time: Optional[str] = None
-    trained_tokens: Optional[int] = None
-    loss: Optional[Union[str, int, float]] = None
-    eval_loss: Optional[Union[str, int, float]] = None
-    acc: Optional[Union[str, int, float]] = None
-    eval_acc: Optional[Union[str, int, float]] = None
-    learning_rate: Optional[Union[str, int, float]] = None
-
-
-class JobEvent(BaseModel):
-    object: Optional[str] = None
-    id: Optional[str] = None
-    type: Optional[str] = None
-    created_at: Optional[int] = None
-    level: Optional[str] = None
-    message: Optional[str] = None
-    data: Optional[Metric] = None
-
-
-class FineTuningJobEvent(BaseModel):
-    object: Optional[str] = None
-    data: list[JobEvent]
-    has_more: Optional[bool] = None
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/job_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/job_create_params.py
deleted file mode 100644
index e1ebc352bc..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/job_create_params.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from __future__ import annotations
-
-from typing import Literal, Union
-
-from typing_extensions import TypedDict
-
-__all__ = ["Hyperparameters"]
-
-
-class Hyperparameters(TypedDict, total=False):
-    batch_size: Union[Literal["auto"], int]
-
-    learning_rate_multiplier: Union[Literal["auto"], float]
-
-    n_epochs: Union[Literal["auto"], int]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/__init__.py
deleted file mode 100644
index 57d0d2511d..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .fine_tuned_models import FineTunedModelsStatus
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/fine_tuned_models.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/fine_tuned_models.py
deleted file mode 100644
index b286a5b577..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/fine_tuning/models/fine_tuned_models.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from typing import ClassVar
-
-from ....core import PYDANTIC_V2, BaseModel, ConfigDict
-
-__all__ = ["FineTunedModelsStatus"]
-
-
-class FineTunedModelsStatus(BaseModel):
-    if PYDANTIC_V2:
-        model_config: ClassVar[ConfigDict] = ConfigDict(extra="allow", protected_namespaces=())
-    request_id: str  # request id
-    model_name: str  # model name
-    delete_status: str  # deletion status: deleting (in progress), deleted (done)
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/image.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/image.py
deleted file mode 100644
index 3bcad0acab..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/image.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from __future__ import annotations
-
-from typing import Optional
-
-from ..core import BaseModel
-
-__all__ = ["GeneratedImage", "ImagesResponded"]
-
-
-class GeneratedImage(BaseModel):
-    b64_json: Optional[str] = None
-    url: Optional[str] = None
-    revised_prompt: Optional[str] = None
-
-
-class ImagesResponded(BaseModel):
-    created: int
-    data: list[GeneratedImage]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/__init__.py
deleted file mode 100644
index 8c81d703e2..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .knowledge import KnowledgeInfo
-from .knowledge_used import KnowledgeStatistics, KnowledgeUsed
-
-__all__ = [
-    "KnowledgeInfo",
-    "KnowledgeStatistics",
-    "KnowledgeUsed",
-]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/__init__.py
deleted file mode 100644
index 59cb41d712..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .document import DocumentData, DocumentFailedInfo, DocumentObject, DocumentSuccessInfo
-
-__all__ = [
-    "DocumentData",
-    "DocumentObject",
-    "DocumentSuccessInfo",
-    "DocumentFailedInfo",
-]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document.py
deleted file mode 100644
index 980bc6f4a7..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from typing import Optional
-
-from ....core import BaseModel
-
-__all__ = ["DocumentData", "DocumentObject", "DocumentSuccessInfo", "DocumentFailedInfo"]
-
-
-class DocumentSuccessInfo(BaseModel):
-    documentId: Optional[str] = None
-    """file id"""
-    filename: Optional[str] = None
-    """file name"""
-
-
-class DocumentFailedInfo(BaseModel):
-    failReason: Optional[str] = None
-    """Reason the upload failed, e.g. unsupported file format, file size over the limit, or the knowledge base is full (capacity limit is 500,000 characters)."""
-    filename: Optional[str] = None
-    """file name"""
-    documentId: Optional[str] = None
-    """knowledge base id"""
-
-
-class DocumentObject(BaseModel):
-    """Document information"""
-
-    successInfos: Optional[list[DocumentSuccessInfo]] = None
-    """Information about files uploaded successfully"""
-    failedInfos: Optional[list[DocumentFailedInfo]] = None
-    """Information about files that failed to upload"""
-
-
-class DocumentDataFailInfo(BaseModel):
-    """Failure details"""
-
-    embedding_code: Optional[int] = (
-        None  # failure code; 10001: knowledge unavailable, knowledge base capacity reached; 10002: knowledge unavailable, capacity reached (character count over limit)
-    )
-    embedding_msg: Optional[str] = None  # failure reason
-
-
-class DocumentData(BaseModel):
-    id: str = None  # unique knowledge id
-    custom_separator: list[str] = None  # chunking separators
-    sentence_size: str = None  # chunk size
-    length: int = None  # file size in bytes
-    word_num: int = None  # word count of the file
-    name: str = None  # file name
-    url: str = None  # file download URL
-    embedding_stat: int = None  # 0: vectorizing, 1: vectorization finished, 2: vectorization failed
-    failInfo: Optional[DocumentDataFailInfo] = None  # failure details, present when vectorization failed (embedding_stat=2)
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_edit_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_edit_params.py
deleted file mode 100644
index 509cb3a451..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_edit_params.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from typing import Optional, TypedDict
-
-__all__ = ["DocumentEditParams"]
-
-
-class DocumentEditParams(TypedDict):
-    """
-    Knowledge parameter type definition
-
-    Attributes:
-        id (str): knowledge ID
-        knowledge_type (int): knowledge type:
-            1: article knowledge, supports pdf, url, docx
-            2: Q&A knowledge - document, supports pdf, url, docx
-            3: Q&A knowledge - table, supports xlsx
-            4: product catalogue - table, supports xlsx
-            5: custom, supports pdf, url, docx
-        custom_separator (Optional[List[str]]): chunking separators when the knowledge type is custom (knowledge_type=5), default \n
-        sentence_size (Optional[int]): chunk size when the knowledge type is custom (knowledge_type=5), range 20-2000, default 300
-        callback_url (Optional[str]): callback URL
-        callback_header (Optional[dict]): headers to send with the callback
-    """
-
-    id: str
-    knowledge_type: int
-    custom_separator: Optional[list[str]]
-    sentence_size: Optional[int]
-    callback_url: Optional[str]
-    callback_header: Optional[dict[str, str]]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_params.py
deleted file mode 100644
index 910c8c045e..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_params.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from __future__ import annotations
-
-from typing import Optional
-
-from typing_extensions import TypedDict
-
-
-class DocumentListParams(TypedDict, total=False):
-    """
-    File query parameter type definition
-
-    Attributes:
-        purpose (Optional[str]): file purpose
-        knowledge_id (Optional[str]): knowledge base ID to query when the file purpose is retrieval
-        page (Optional[int]): page number, default 1
-        limit (Optional[int]): number of files to list, default 10
-        after (Optional[str]): list files after the given file ID (required when the file purpose is fine-tune)
-        order (Optional[str]): sort order, one of ['desc', 'asc'], default desc (required when the file purpose is fine-tune)
-    """
-
-    purpose: Optional[str]
-    knowledge_id: Optional[str]
-    page: Optional[int]
-    limit: Optional[int]
-    after: Optional[str]
-    order: Optional[str]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_resp.py
deleted file mode 100644
index acae4fad9f..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/document/document_list_resp.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from __future__ import annotations
-
-from ....core import BaseModel
-from . import DocumentData
-
-__all__ = ["DocumentPage"]
-
-
-class DocumentPage(BaseModel):
-    list: list[DocumentData]
-    object: str
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge.py
deleted file mode 100644
index bc6f159eb2..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from typing import Optional
-
-from ...core import BaseModel
-
-__all__ = ["KnowledgeInfo"]
-
-
-class KnowledgeInfo(BaseModel):
-    id: Optional[str] = None
-    """Unique knowledge base id"""
-    embedding_id: Optional[str] = (
-        None  # embedding model bound to the knowledge base; see the model list in the [internal service API docs](https://lslfd0slxc.feishu.cn/docx/YauWdbBiMopV0FxB7KncPWCEn8f#H15NduiQZo3ugmxnWQFcfAHpnQ4)
-    )
-    name: Optional[str] = None  # knowledge base name, up to 100 characters
-    customer_identifier: Optional[str] = None  # user identifier, up to 32 characters
-    description: Optional[str] = None  # knowledge base description, up to 500 characters
-    background: Optional[str] = None  # background colour (enum): 'blue', 'red', 'orange', 'purple', 'sky'
-    icon: Optional[str] = (
-        None  # knowledge base icon (enum): question, book, seal, wrench, tag, horn, house  # noqa: E501
-    )
-    bucket_id: Optional[str] = None  # bucket id, up to 32 characters
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_create_params.py
deleted file mode 100644
index c3da201727..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_create_params.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from __future__ import annotations
-
-from typing import Literal, Optional
-
-from typing_extensions import TypedDict
-
-__all__ = ["KnowledgeBaseParams"]
-
-
-class KnowledgeBaseParams(TypedDict):
-    """
-    Knowledge base parameter type definition
-
-    Attributes:
-        embedding_id (int): ID of the embedding model bound to the knowledge base
-        name (str): knowledge base name, up to 100 characters
-        customer_identifier (Optional[str]): user identifier, up to 32 characters
-        description (Optional[str]): knowledge base description, up to 500 characters
-        background (Optional[Literal['blue', 'red', 'orange', 'purple', 'sky']]): background colour
-        icon (Optional[Literal['question', 'book', 'seal', 'wrench', 'tag', 'horn', 'house']]): knowledge base icon
-        bucket_id (Optional[str]): bucket ID, up to 32 characters
-    """
-
-    embedding_id: int
-    name: str
-    customer_identifier: Optional[str]
-    description: Optional[str]
-    background: Optional[Literal["blue", "red", "orange", "purple", "sky"]] = None
-    icon: Optional[Literal["question", "book", "seal", "wrench", "tag", "horn", "house"]] = None
-    bucket_id: Optional[str]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_params.py
deleted file mode 100644
index a221b28e46..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_params.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from __future__ import annotations
-
-from typing_extensions import TypedDict
-
-__all__ = ["KnowledgeListParams"]
-
-
-class KnowledgeListParams(TypedDict, total=False):
-    page: int = 1
-    """Page number, default 1 (the first page)
-    """
-
-    size: int = 10
-    """Page size, default 10
-    """
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_resp.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_resp.py
deleted file mode 100644
index e462eddc55..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_list_resp.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from __future__ import annotations
-
-from ...core import BaseModel
-from . import KnowledgeInfo
-
-__all__ = ["KnowledgePage"]
-
-
-class KnowledgePage(BaseModel):
-    list: list[KnowledgeInfo]
-    object: str
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_used.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_used.py
deleted file mode 100644
index cfda709702..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/knowledge/knowledge_used.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from typing import Optional
-
-from ...core import BaseModel
-
-__all__ = ["KnowledgeStatistics", "KnowledgeUsed"]
-
-
-class KnowledgeStatistics(BaseModel):
-    """
-    Usage statistics
-    """
-
-    word_num: Optional[int] = None
-    length: Optional[int] = None
-
-
-class KnowledgeUsed(BaseModel):
-    used: Optional[KnowledgeStatistics] = None
-    """Amount used"""
-    total: Optional[KnowledgeStatistics] = None
-    """Total knowledge base capacity"""
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/__init__.py
deleted file mode 100644
index c9bd60419c..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .sensitive_word_check import SensitiveWordCheckRequest
-
-__all__ = ["SensitiveWordCheckRequest"]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/sensitive_word_check.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/sensitive_word_check.py
deleted file mode 100644
index 0c37d99e65..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/sensitive_word_check/sensitive_word_check.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from typing import Optional
-
-from typing_extensions import TypedDict
-
-
-class SensitiveWordCheckRequest(TypedDict, total=False):
-    type: Optional[str]
-    """Sensitive-word type; currently only ALL is supported"""
-    status: Optional[str]
-    """Whether the sensitive-word check is enabled:
-    enabled: ENABLE
-    disabled: DISABLE
-    Note: the check is enabled by default. To disable it, you must contact business support for the corresponding permission; otherwise disabling has no effect.
-    """
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/__init__.py
deleted file mode 100644
index 62f77344ee..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from .web_search import (
-    SearchIntent,
-    SearchRecommend,
-    SearchResult,
-    WebSearch,
-)
-from .web_search_chunk import WebSearchChunk
-
-__all__ = ["WebSearch", "SearchIntent", "SearchResult", "SearchRecommend", "WebSearchChunk"]
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/tools_web_search_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/tools_web_search_params.py
deleted file mode 100644
index b3a3b26f07..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/tools_web_search_params.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from __future__ import annotations
-
-from typing import Optional, Union
-
-from typing_extensions import TypedDict
-
-__all__ = ["WebSearchParams"]
-
-
-class WebSearchParams(TypedDict):
-    """
-    Parameter type definition for the web-search-pro tool
-
-    Attributes:
-    :param model: str, model name
-    :param request_id: Optional[str], request ID
-    :param stream: Optional[bool], whether to stream
-    :param messages: Union[str, List[str], List[int], object, None],
-        conversation history, passed as a JSON array of {"role": "user", "content": "hello"} objects.
-        The current version only supports a single-turn User Message; the tool interprets the
-        User Message and performs the search. Where possible, pass the user's original question
-        without instruction formatting to improve search accuracy.
-    :param scope: Optional[str], search scope (whole web, academic, ...), default whole web
-    :param location: Optional[str], the user's region, to improve relevance
-    :param recent_days: Optional[int], restrict results to those updated in the last N days (1-30)
-    """
-
-    model: str
-    request_id: Optional[str]
-    stream: Optional[bool]
-    messages: Union[str, list[str], list[int], object, None]
-    scope: Optional[str] = None
-    location: Optional[str] = None
-    recent_days: Optional[int] = None
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search.py
deleted file mode 100644
index ac9fa3821e..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from typing import Optional
-
-from ...core import BaseModel
-
-__all__ = [
-    "WebSearch",
-    "SearchIntent",
-    "SearchResult",
-    "SearchRecommend",
-]
-
-
-class SearchIntent(BaseModel):
-    index: int
-    # search round, defaults to 0
-    query: str
-    # optimized search query
-    intent: str
-    # detected intent type
-    keywords: str
-    # search keywords
-
-
-class SearchResult(BaseModel):
-    index: int
-    # search round, defaults to 0
-    title: str
-    # title
-    link: str
-    # link
-    content: str
-    # content
-    icon: str
-    # icon
-    media: str
-    # source media
-    refer: str
-    # reference marker, e.g. [ref_1]
-
-
-class SearchRecommend(BaseModel):
-    index: int
-    # search round, defaults to 0
-    query: str
-    # recommended query
-
-
-class WebSearchMessageToolCall(BaseModel):
-    id: str
-    search_intent: Optional[SearchIntent]
-    search_result: Optional[SearchResult]
-    search_recommend: Optional[SearchRecommend]
-    type: str
-
-
-class WebSearchMessage(BaseModel):
-    role: str
-    tool_calls: Optional[list[WebSearchMessageToolCall]] = None
-
-
-class WebSearchChoice(BaseModel):
-    index: int
-    finish_reason: str
-    message: WebSearchMessage
-
-
-class WebSearch(BaseModel):
-    created: Optional[int] = None
-    choices: list[WebSearchChoice]
-    request_id: Optional[str] = None
-    id: Optional[str] = None
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search_chunk.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search_chunk.py
deleted file mode 100644
index 7fb0e02bb5..0000000000
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/tools/web_search_chunk.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from typing import Optional
-
-from ...core import BaseModel
-from .web_search import SearchIntent, SearchRecommend, SearchResult
-
-__all__ = ["WebSearchChunk"]
-
-
-class ChoiceDeltaToolCall(BaseModel):
-    index: int
-    id: Optional[str] =
None - - search_intent: Optional[SearchIntent] = None - search_result: Optional[SearchResult] = None - search_recommend: Optional[SearchRecommend] = None - type: Optional[str] = None - - -class ChoiceDelta(BaseModel): - role: Optional[str] = None - tool_calls: Optional[list[ChoiceDeltaToolCall]] = None - - -class Choice(BaseModel): - delta: ChoiceDelta - finish_reason: Optional[str] = None - index: int - - -class WebSearchChunk(BaseModel): - id: Optional[str] = None - choices: list[Choice] - created: Optional[int] = None diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/__init__.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/__init__.py deleted file mode 100644 index b14072b1a7..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .video_object import VideoObject, VideoResult - -__all__ = ["VideoObject", "VideoResult"] diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_create_params.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_create_params.py deleted file mode 100644 index f5489d708e..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_create_params.py +++ /dev/null @@ -1,27 +0,0 @@ -from __future__ import annotations - -from typing import Optional - -from typing_extensions import TypedDict - -__all__ = ["VideoCreateParams"] - -from ..sensitive_word_check import SensitiveWordCheckRequest - - -class VideoCreateParams(TypedDict, total=False): - model: str - """模型编码""" - prompt: str - """所需视频的文本描述""" - image_url: str - """所需视频的文本描述""" - sensitive_word_check: Optional[SensitiveWordCheckRequest] - """支持 URL 或者 Base64、传入 image 奖进行图生视频 - * 图片格式: - * 图片大小:""" - request_id: str - """由用户端传参,需保证唯一性;用于区分每次请求的唯一标识,用户端不传时平台会默认生成。""" - - user_id: str - """用户端。""" diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_object.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_object.py deleted file mode 100644 index 85c3844d8a..0000000000 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/video/video_object.py +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Optional - -from ...core import BaseModel - -__all__ = ["VideoObject", "VideoResult"] - - -class VideoResult(BaseModel): - url: str - """视频url""" - cover_image_url: str - """预览图""" - - -class VideoObject(BaseModel): - id: Optional[str] = None - """智谱 AI 开放平台生成的任务订单号,调用请求结果接口时请使用此订单号""" - - model: str - """模型名称""" - - video_result: list[VideoResult] - """视频生成结果""" - - task_status: str - """处理状态,PROCESSING(处理中),SUCCESS(成功),FAIL(失败) - 注:处理中状态需通过查询获取结果""" - - request_id: str - """用户在客户端请求时提交的任务编号或者平台生成的任务编号""" diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index 171e34f8cb..0cba40c51a 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -110,26 +110,35 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_trace(langfuse_trace_data=trace_data) # through workflow_run_id get all_nodes_execution - workflow_nodes_executions = ( - db.session.query( - WorkflowNodeExecution.id, - WorkflowNodeExecution.tenant_id, - WorkflowNodeExecution.app_id, - WorkflowNodeExecution.title, - WorkflowNodeExecution.node_type, - WorkflowNodeExecution.status, - WorkflowNodeExecution.inputs, - 
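The deletions above finish removing the vendored zhipuai SDK type modules from the model runtime; callers are expected to depend on the published zhipuai package instead (the cogview tool later in this diff already switches its import). As a quick orientation, a minimal sketch of the replacement import path, assuming the package is installed; the API key is a placeholder:

from zhipuai import ZhipuAI  # official SDK from PyPI, replacing the vendored copy

client = ZhipuAI(api_key="your-api-key")  # placeholder credential
# e.g. client.chat.completions.create(...) covers the surface the vendored types modeled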
diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py
index 171e34f8cb..0cba40c51a 100644
--- a/api/core/ops/langfuse_trace/langfuse_trace.py
+++ b/api/core/ops/langfuse_trace/langfuse_trace.py
@@ -110,26 +110,35 @@ class LangFuseDataTrace(BaseTraceInstance):
         self.add_trace(langfuse_trace_data=trace_data)
 
         # through workflow_run_id get all_nodes_execution
-        workflow_nodes_executions = (
-            db.session.query(
-                WorkflowNodeExecution.id,
-                WorkflowNodeExecution.tenant_id,
-                WorkflowNodeExecution.app_id,
-                WorkflowNodeExecution.title,
-                WorkflowNodeExecution.node_type,
-                WorkflowNodeExecution.status,
-                WorkflowNodeExecution.inputs,
-                WorkflowNodeExecution.outputs,
-                WorkflowNodeExecution.created_at,
-                WorkflowNodeExecution.elapsed_time,
-                WorkflowNodeExecution.process_data,
-                WorkflowNodeExecution.execution_metadata,
-            )
+        workflow_nodes_execution_id_records = (
+            db.session.query(WorkflowNodeExecution.id)
             .filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id)
             .all()
         )
 
-        for node_execution in workflow_nodes_executions:
+        for node_execution_id_record in workflow_nodes_execution_id_records:
+            node_execution = (
+                db.session.query(
+                    WorkflowNodeExecution.id,
+                    WorkflowNodeExecution.tenant_id,
+                    WorkflowNodeExecution.app_id,
+                    WorkflowNodeExecution.title,
+                    WorkflowNodeExecution.node_type,
+                    WorkflowNodeExecution.status,
+                    WorkflowNodeExecution.inputs,
+                    WorkflowNodeExecution.outputs,
+                    WorkflowNodeExecution.created_at,
+                    WorkflowNodeExecution.elapsed_time,
+                    WorkflowNodeExecution.process_data,
+                    WorkflowNodeExecution.execution_metadata,
+                )
+                .filter(WorkflowNodeExecution.id == node_execution_id_record.id)
+                .first()
+            )
+
+            if not node_execution:
+                continue
+
             node_execution_id = node_execution.id
             tenant_id = node_execution.tenant_id
             app_id = node_execution.app_id
diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py
index 37cbea13fd..ad45050405 100644
--- a/api/core/ops/langsmith_trace/langsmith_trace.py
+++ b/api/core/ops/langsmith_trace/langsmith_trace.py
@@ -100,26 +100,35 @@ class LangSmithDataTrace(BaseTraceInstance):
         self.add_run(langsmith_run)
 
         # through workflow_run_id get all_nodes_execution
-        workflow_nodes_executions = (
-            db.session.query(
-                WorkflowNodeExecution.id,
-                WorkflowNodeExecution.tenant_id,
-                WorkflowNodeExecution.app_id,
-                WorkflowNodeExecution.title,
-                WorkflowNodeExecution.node_type,
-                WorkflowNodeExecution.status,
-                WorkflowNodeExecution.inputs,
-                WorkflowNodeExecution.outputs,
-                WorkflowNodeExecution.created_at,
-                WorkflowNodeExecution.elapsed_time,
-                WorkflowNodeExecution.process_data,
-                WorkflowNodeExecution.execution_metadata,
-            )
+        workflow_nodes_execution_id_records = (
+            db.session.query(WorkflowNodeExecution.id)
             .filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id)
             .all()
         )
 
-        for node_execution in workflow_nodes_executions:
+        for node_execution_id_record in workflow_nodes_execution_id_records:
+            node_execution = (
+                db.session.query(
+                    WorkflowNodeExecution.id,
+                    WorkflowNodeExecution.tenant_id,
+                    WorkflowNodeExecution.app_id,
+                    WorkflowNodeExecution.title,
+                    WorkflowNodeExecution.node_type,
+                    WorkflowNodeExecution.status,
+                    WorkflowNodeExecution.inputs,
+                    WorkflowNodeExecution.outputs,
+                    WorkflowNodeExecution.created_at,
+                    WorkflowNodeExecution.elapsed_time,
+                    WorkflowNodeExecution.process_data,
+                    WorkflowNodeExecution.execution_metadata,
+                )
+                .filter(WorkflowNodeExecution.id == node_execution_id_record.id)
+                .first()
+            )
+
+            if not node_execution:
+                continue
+
             node_execution_id = node_execution.id
             tenant_id = node_execution.tenant_id
             app_id = node_execution.app_id
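Both tracing hunks apply the same refactor: instead of one wide query that materializes every node execution row at once (whose inputs, outputs, and process_data columns can be large), they first select only the primary keys, then hydrate one row per loop iteration and skip rows deleted in between. A self-contained sketch of the pattern; the model, engine URL, and run id below are illustrative stand-ins, not Dify's actual schema:

from sqlalchemy import String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class NodeExecution(Base):  # hypothetical stand-in for WorkflowNodeExecution
    __tablename__ = "node_executions"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    workflow_run_id: Mapped[str] = mapped_column(String)
    outputs: Mapped[str] = mapped_column(String)  # a large JSON blob in practice


engine = create_engine("sqlite:///:memory:")  # placeholder database
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Phase 1: fetch only the ids, cheap even when the run has many nodes.
    ids = session.scalars(
        select(NodeExecution.id).where(NodeExecution.workflow_run_id == "run-1")
    ).all()

    # Phase 2: hydrate a single row at a time, keeping peak memory bounded.
    for node_id in ids:
        row = session.get(NodeExecution, node_id)
        if row is None:
            continue  # the row may have vanished between the two queries
        print(row.outputs)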
diff --git a/api/core/rag/datasource/keyword/keyword_base.py b/api/core/rag/datasource/keyword/keyword_base.py
index 4b9ec460e6..be00687abd 100644
--- a/api/core/rag/datasource/keyword/keyword_base.py
+++ b/api/core/rag/datasource/keyword/keyword_base.py
@@ -27,9 +27,11 @@ class BaseKeyword(ABC):
     def delete_by_ids(self, ids: list[str]) -> None:
         raise NotImplementedError
 
+    @abstractmethod
     def delete(self) -> None:
         raise NotImplementedError
 
+    @abstractmethod
     def search(self, query: str, **kwargs: Any) -> list[Document]:
         raise NotImplementedError
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/__init__.py b/api/core/rag/datasource/vdb/baidu/__init__.py
similarity index 100%
rename from api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/types/chat/__init__.py
rename to api/core/rag/datasource/vdb/baidu/__init__.py
diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py
new file mode 100644
index 0000000000..543cfa67b3
--- /dev/null
+++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py
@@ -0,0 +1,272 @@
+import json
+import time
+import uuid
+from typing import Any
+
+from pydantic import BaseModel, model_validator
+from pymochow import MochowClient
+from pymochow.auth.bce_credentials import BceCredentials
+from pymochow.configuration import Configuration
+from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, TableState
+from pymochow.model.schema import Field, HNSWParams, Schema, VectorIndex
+from pymochow.model.table import AnnSearch, HNSWSearchParams, Partition, Row
+
+from configs import dify_config
+from core.rag.datasource.entity.embedding import Embeddings
+from core.rag.datasource.vdb.vector_base import BaseVector
+from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
+from core.rag.datasource.vdb.vector_type import VectorType
+from core.rag.models.document import Document
+from extensions.ext_redis import redis_client
+from models.dataset import Dataset
+
+
+class BaiduConfig(BaseModel):
+    endpoint: str
+    connection_timeout_in_mills: int = 30 * 1000
+    account: str
+    api_key: str
+    database: str
+    index_type: str = "HNSW"
+    metric_type: str = "L2"
+    shard: int = 1
+    replicas: int = 3
+
+    @model_validator(mode="before")
+    @classmethod
+    def validate_config(cls, values: dict) -> dict:
+        if not values["endpoint"]:
+            raise ValueError("config BAIDU_VECTOR_DB_ENDPOINT is required")
+        if not values["account"]:
+            raise ValueError("config BAIDU_VECTOR_DB_ACCOUNT is required")
+        if not values["api_key"]:
+            raise ValueError("config BAIDU_VECTOR_DB_API_KEY is required")
+        if not values["database"]:
+            raise ValueError("config BAIDU_VECTOR_DB_DATABASE is required")
+        return values
+
+
+class BaiduVector(BaseVector):
+    field_id: str = "id"
+    field_vector: str = "vector"
+    field_text: str = "text"
+    field_metadata: str = "metadata"
+    field_app_id: str = "app_id"
+    field_annotation_id: str = "annotation_id"
+    index_vector: str = "vector_idx"
+
+    def __init__(self, collection_name: str, config: BaiduConfig):
+        super().__init__(collection_name)
+        self._client_config = config
+        self._client = self._init_client(config)
+        self._db = self._init_database()
+
+    def get_type(self) -> str:
+        return VectorType.BAIDU
+
+    def to_index_struct(self) -> dict:
+        return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}}
+
+    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
+        self._create_table(len(embeddings[0]))
+        self.add_texts(texts, embeddings)
+
+    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
+        texts = [doc.page_content for doc in documents]
+        metadatas = [doc.metadata for doc in documents]
+        total_count = len(documents)
+        batch_size = 1000
+
+        # upsert texts and embeddings batch by batch
+        table = self._db.table(self._collection_name)
+        for start in range(0, total_count, batch_size):
+            end = min(start + batch_size, total_count)
+            rows = []
+            for i in range(start, end, 1):
+                row = Row(
+                    id=metadatas[i].get("doc_id", str(uuid.uuid4())),
+                    vector=embeddings[i],
+                    text=texts[i],
+                    metadata=json.dumps(metadatas[i]),
+                    app_id=metadatas[i].get("app_id", ""),
+                    annotation_id=metadatas[i].get("annotation_id", ""),
+                )
+                rows.append(row)
+            table.upsert(rows=rows)
+
+        # rebuild vector index after upsert finished
+        table.rebuild_index(self.index_vector)
+        while True:
+            time.sleep(1)
+            index = table.describe_index(self.index_vector)
+            if index.state == IndexState.NORMAL:
+                break
+    def text_exists(self, id: str) -> bool:
+        res = self._db.table(self._collection_name).query(primary_key={self.field_id: id})
+        if res and res.code == 0:
+            return True
+        return False
+
+    def delete_by_ids(self, ids: list[str]) -> None:
+        quoted_ids = [f"'{id}'" for id in ids]
+        self._db.table(self._collection_name).delete(filter=f"id IN({', '.join(quoted_ids)})")
+
+    def delete_by_metadata_field(self, key: str, value: str) -> None:
+        self._db.table(self._collection_name).delete(filter=f"{key} = '{value}'")
+
+    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+        anns = AnnSearch(
+            vector_field=self.field_vector,
+            vector_floats=query_vector,
+            params=HNSWSearchParams(ef=kwargs.get("ef", 10), limit=kwargs.get("top_k", 4)),
+        )
+        res = self._db.table(self._collection_name).search(
+            anns=anns,
+            projections=[self.field_id, self.field_text, self.field_metadata],
+            retrieve_vector=True,
+        )
+        score_threshold = float(kwargs.get("score_threshold") or 0.0)
+        return self._get_search_res(res, score_threshold)
+
+    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
+        # baidu vector database doesn't support bm25 search on current version
+        return []
+
+    def _get_search_res(self, res, score_threshold):
+        docs = []
+        for row in res.rows:
+            row_data = row.get("row", {})
+            meta = row_data.get(self.field_metadata)
+            if meta is not None:
+                meta = json.loads(meta)
+            score = row.get("score", 0.0)
+            if score > score_threshold:
+                meta["score"] = score
+                doc = Document(page_content=row_data.get(self.field_text), metadata=meta)
+                docs.append(doc)
+
+        return docs
+
+    def delete(self) -> None:
+        self._db.drop_table(table_name=self._collection_name)
+
+    def _init_client(self, config) -> MochowClient:
+        config = Configuration(credentials=BceCredentials(config.account, config.api_key), endpoint=config.endpoint)
+        client = MochowClient(config)
+        return client
+
+    def _init_database(self):
+        exists = False
+        for db in self._client.list_databases():
+            if db.database_name == self._client_config.database:
+                exists = True
+                break
+        # Create database if not existed
+        if exists:
+            return self._client.database(self._client_config.database)
+        else:
+            return self._client.create_database(database_name=self._client_config.database)
+
+    def _table_existed(self) -> bool:
+        tables = self._db.list_table()
+        return any(table.table_name == self._collection_name for table in tables)
+
+    def _create_table(self, dimension: int) -> None:
+        # Try to grab distributed lock and create table
+        lock_name = "vector_indexing_lock_{}".format(self._collection_name)
+        with redis_client.lock(lock_name, timeout=20):
+            table_exist_cache_key = "vector_indexing_{}".format(self._collection_name)
+            if redis_client.get(table_exist_cache_key):
+                return
+
+            if self._table_existed():
+                return
+
+            self.delete()
+
+            # check IndexType and MetricType
+            index_type = None
+            for k, v in IndexType.__members__.items():
+                if k == self._client_config.index_type:
+                    index_type = v
+            if index_type is None:
+                raise ValueError("unsupported index_type")
+            metric_type = None
+            for k, v in MetricType.__members__.items():
+                if k == self._client_config.metric_type:
+                    metric_type = v
+            if metric_type is None:
+                raise ValueError("unsupported metric_type")
+
+            # Construct field schema
+            fields = []
+            fields.append(
+                Field(
+                    self.field_id,
+                    FieldType.STRING,
+                    primary_key=True,
+                    partition_key=True,
+                    auto_increment=False,
+                    not_null=True,
+                )
+            )
+            fields.append(Field(self.field_metadata, FieldType.STRING, not_null=True))
+            fields.append(Field(self.field_app_id, FieldType.STRING))
+            fields.append(Field(self.field_annotation_id, FieldType.STRING))
+            fields.append(Field(self.field_text, FieldType.TEXT, not_null=True))
+            fields.append(Field(self.field_vector, FieldType.FLOAT_VECTOR, not_null=True, dimension=dimension))
+
+            # Construct vector index params
+            indexes = []
+            indexes.append(
+                VectorIndex(
+                    index_name="vector_idx",
+                    index_type=index_type,
+                    field="vector",
+                    metric_type=metric_type,
+                    params=HNSWParams(m=16, efconstruction=200),
+                )
+            )
+
+            # Create table
+            self._db.create_table(
+                table_name=self._collection_name,
+                replication=self._client_config.replicas,
+                partition=Partition(partition_num=self._client_config.shard),
+                schema=Schema(fields=fields, indexes=indexes),
+                description="Table for Dify",
+            )
+
+            redis_client.set(table_exist_cache_key, 1, ex=3600)
+
+            # Wait for table created
+            while True:
+                time.sleep(1)
+                table = self._db.describe_table(self._collection_name)
+                if table.state == TableState.NORMAL:
+                    break
+
+
+class BaiduVectorFactory(AbstractVectorFactory):
+    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> BaiduVector:
+        if dataset.index_struct_dict:
+            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
+            collection_name = class_prefix.lower()
+        else:
+            dataset_id = dataset.id
+            collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
+            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.BAIDU, collection_name))
+
+        return BaiduVector(
+            collection_name=collection_name,
+            config=BaiduConfig(
+                endpoint=dify_config.BAIDU_VECTOR_DB_ENDPOINT,
+                connection_timeout_in_mills=dify_config.BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS,
+                account=dify_config.BAIDU_VECTOR_DB_ACCOUNT,
+                api_key=dify_config.BAIDU_VECTOR_DB_API_KEY,
+                database=dify_config.BAIDU_VECTOR_DB_DATABASE,
+                shard=dify_config.BAIDU_VECTOR_DB_SHARD,
+                replicas=dify_config.BAIDU_VECTOR_DB_REPLICAS,
+            ),
+        )
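For orientation, a rough usage sketch of the new BaiduVector class, assuming it runs inside the api codebase with a reachable Baidu Mochow endpoint and the Redis instance used for the creation lock; every endpoint, credential, and dimension below is a placeholder, not a documented default:

from core.rag.datasource.vdb.baidu.baidu_vector import BaiduConfig, BaiduVector
from core.rag.models.document import Document

config = BaiduConfig(
    endpoint="http://127.0.0.1:5287",  # placeholder endpoint
    account="root",                    # placeholder account
    api_key="dify",                    # placeholder key
    database="dify",                   # placeholder database name
)
vector = BaiduVector(collection_name="vector_index_example", config=config)

docs = [Document(page_content="hello", metadata={"doc_id": "doc-1", "app_id": "", "annotation_id": ""})]
embeddings = [[0.1] * 768]  # the dimension must match the embedding model

vector.create(docs, embeddings)  # creates the table, waits for NORMAL state, then upserts
hits = vector.search_by_vector([0.1] * 768, top_k=4, score_threshold=0.5)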
diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
index 8d57855120..66bc31a4bf 100644
--- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
+++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py
@@ -1,5 +1,6 @@
 import json
 import logging
+import math
 from typing import Any, Optional
 from urllib.parse import urlparse
@@ -76,7 +77,7 @@ class ElasticSearchVector(BaseVector):
             raise ValueError("Elasticsearch vector database version must be greater than 8.0.0")
 
     def get_type(self) -> str:
-        return "elasticsearch"
+        return VectorType.ELASTICSEARCH
 
     def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
         uuids = self._get_uuids(documents)
@@ -112,7 +113,8 @@ class ElasticSearchVector(BaseVector):
 
     def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
         top_k = kwargs.get("top_k", 10)
-        knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": top_k}
+        num_candidates = math.ceil(top_k * 1.5)
+        knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": top_k, "num_candidates": num_candidates}
 
         results = self._client.search(index=self._collection_name, knn=knn, size=top_k)
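In Elasticsearch's approximate kNN search, num_candidates bounds how many candidate vectors each shard considers before the top k are returned, so setting it explicitly trades a little latency for recall; the hunk above pins it to ceil(1.5 x top_k). A standalone sketch of the resulting request, assuming an 8.x cluster at a placeholder URL and a 768-dimension dense_vector field named "vector" in a placeholder index:

import math

from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # placeholder cluster

top_k = 10
knn = {
    "field": "vector",
    "query_vector": [0.1] * 768,               # must match the mapping's dims
    "k": top_k,                                # neighbors to return per shard
    "num_candidates": math.ceil(top_k * 1.5),  # candidates scored per shard
}
results = client.search(index="my-index", knn=knn, size=top_k)  # placeholder index name
print(results["hits"]["hits"])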
diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py
index 943b23870c..873b289027 100644
--- a/api/core/rag/datasource/vdb/vector_factory.py
+++ b/api/core/rag/datasource/vdb/vector_factory.py
@@ -103,6 +103,14 @@ class Vector:
                 from core.rag.datasource.vdb.analyticdb.analyticdb_vector import AnalyticdbVectorFactory
 
                 return AnalyticdbVectorFactory
+            case VectorType.BAIDU:
+                from core.rag.datasource.vdb.baidu.baidu_vector import BaiduVectorFactory
+
+                return BaiduVectorFactory
+            case VectorType.VIKINGDB:
+                from core.rag.datasource.vdb.vikingdb.vikingdb_vector import VikingDBVectorFactory
+
+                return VikingDBVectorFactory
             case _:
                 raise ValueError(f"Vector store {vector_type} is not supported.")
diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py
index ba04ea879d..b4d604a080 100644
--- a/api/core/rag/datasource/vdb/vector_type.py
+++ b/api/core/rag/datasource/vdb/vector_type.py
@@ -16,3 +16,5 @@ class VectorType(str, Enum):
     TENCENT = "tencent"
     ORACLE = "oracle"
     ELASTICSEARCH = "elasticsearch"
+    BAIDU = "baidu"
+    VIKINGDB = "vikingdb"
diff --git a/api/core/rag/datasource/vdb/vikingdb/__init__.py b/api/core/rag/datasource/vdb/vikingdb/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py
new file mode 100644
index 0000000000..22d0e92586
--- /dev/null
+++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py
@@ -0,0 +1,239 @@
+import json
+from typing import Any
+
+from pydantic import BaseModel
+from volcengine.viking_db import (
+    Data,
+    DistanceType,
+    Field,
+    FieldType,
+    IndexType,
+    QuantType,
+    VectorIndexParams,
+    VikingDBService,
+)
+
+from configs import dify_config
+from core.rag.datasource.entity.embedding import Embeddings
+from core.rag.datasource.vdb.field import Field as vdb_Field
+from core.rag.datasource.vdb.vector_base import BaseVector
+from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
+from core.rag.datasource.vdb.vector_type import VectorType
+from core.rag.models.document import Document
+from extensions.ext_redis import redis_client
+from models.dataset import Dataset
+
+
+class VikingDBConfig(BaseModel):
+    access_key: str
+    secret_key: str
+    host: str
+    region: str
+    scheme: str
+    connection_timeout: int
+    socket_timeout: int
+    index_type: str = IndexType.HNSW
+    distance: str = DistanceType.L2
+    quant: str = QuantType.Float
+
+
+class VikingDBVector(BaseVector):
+    def __init__(self, collection_name: str, group_id: str, config: VikingDBConfig):
+        super().__init__(collection_name)
+        self._group_id = group_id
+        self._client_config = config
+        self._index_name = f"{self._collection_name}_idx"
+        self._client = VikingDBService(
+            host=config.host,
+            region=config.region,
+            scheme=config.scheme,
+            connection_timeout=config.connection_timeout,
+            socket_timeout=config.socket_timeout,
+            ak=config.access_key,
+            sk=config.secret_key,
+        )
+
+    def _has_collection(self) -> bool:
+        try:
+            self._client.get_collection(self._collection_name)
+        except Exception:
+            return False
+        return True
+
+    def _has_index(self) -> bool:
+        try:
+            self._client.get_index(self._collection_name, self._index_name)
+        except Exception:
+            return False
+        return True
+
+    def _create_collection(self, dimension: int):
+        lock_name = f"vector_indexing_lock_{self._collection_name}"
+        with redis_client.lock(lock_name, timeout=20):
+            collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
+            if redis_client.get(collection_exist_cache_key):
+                return
+
+            if not self._has_collection():
+                fields = [
+                    Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True),
+                    Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String),
+                    Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String),
+                    Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text),
+                    Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=dimension),
+                ]
+
+                self._client.create_collection(
+                    collection_name=self._collection_name,
+                    fields=fields,
+                    description="Collection For Dify",
+                )
+
+            if not self._has_index():
+                vector_index = VectorIndexParams(
+                    distance=self._client_config.distance,
+                    index_type=self._client_config.index_type,
+                    quant=self._client_config.quant,
+                )
+
+                self._client.create_index(
+                    collection_name=self._collection_name,
+                    index_name=self._index_name,
+                    vector_index=vector_index,
+                    partition_by=vdb_Field.GROUP_KEY.value,
+                    description="Index For Dify",
+                )
+            redis_client.set(collection_exist_cache_key, 1, ex=3600)
+
+    def get_type(self) -> str:
+        return VectorType.VIKINGDB
+
+    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
+        dimension = len(embeddings[0])
+        self._create_collection(dimension)
+        self.add_texts(texts, embeddings, **kwargs)
+
+    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
+        page_contents = [doc.page_content for doc in documents]
+        metadatas = [doc.metadata for doc in documents]
+        docs = []
+
+        for i, page_content in enumerate(page_contents):
+            metadata = {}
+            if metadatas is not None:
+                for key, val in metadatas[i].items():
+                    metadata[key] = val
+            doc = Data(
+                {
+                    vdb_Field.PRIMARY_KEY.value: metadatas[i]["doc_id"],
+                    vdb_Field.VECTOR.value: embeddings[i] if embeddings else None,
+                    vdb_Field.CONTENT_KEY.value: page_content,
+                    vdb_Field.METADATA_KEY.value: json.dumps(metadata),
+                    vdb_Field.GROUP_KEY.value: self._group_id,
+                }
+            )
+            docs.append(doc)
+
+        self._client.get_collection(self._collection_name).upsert_data(docs)
+
+    def text_exists(self, id: str) -> bool:
+        docs = self._client.get_collection(self._collection_name).fetch_data(id)
+        not_exists_str = "data does not exist"
+        if docs is not None and not_exists_str not in docs.fields.get("message", ""):
+            return True
+        return False
+
+    def delete_by_ids(self, ids: list[str]) -> None:
+        self._client.get_collection(self._collection_name).delete_data(ids)
+
+    def get_ids_by_metadata_field(self, key: str, value: str):
+        # Note: the metadata field value is a dict, but VikingDB fields do not
+        # support a JSON type, so matching happens client-side after fetching.
+        results = self._client.get_index(self._collection_name, self._index_name).search(
+            filter={"op": "must", "field": vdb_Field.GROUP_KEY.value, "conds": [self._group_id]},
+            # max value is 5000
+            limit=5000,
+        )
+
+        if not results:
+            return []
+
+        ids = []
+        for result in results:
+            metadata = result.fields.get(vdb_Field.METADATA_KEY.value)
+            if metadata is not None:
+                metadata = json.loads(metadata)
+                if metadata.get(key) == value:
+                    ids.append(result.id)
+        return ids
+
+    def delete_by_metadata_field(self, key: str,
value: str) -> None: + ids = self.get_ids_by_metadata_field(key, value) + self.delete_by_ids(ids) + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + results = self._client.get_index(self._collection_name, self._index_name).search_by_vector( + query_vector, limit=kwargs.get("top_k", 50) + ) + score_threshold = float(kwargs.get("score_threshold") or 0.0) + return self._get_search_res(results, score_threshold) + + def _get_search_res(self, results, score_threshold): + if len(results) == 0: + return [] + + docs = [] + for result in results: + metadata = result.fields.get(vdb_Field.METADATA_KEY.value) + if metadata is not None: + metadata = json.loads(metadata) + if result.score > score_threshold: + metadata["score"] = result.score + doc = Document(page_content=result.fields.get(vdb_Field.CONTENT_KEY.value), metadata=metadata) + docs.append(doc) + docs = sorted(docs, key=lambda x: x.metadata["score"], reverse=True) + return docs + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + return [] + + def delete(self) -> None: + if self._has_index(): + self._client.drop_index(self._collection_name, self._index_name) + if self._has_collection(): + self._client.drop_collection(self._collection_name) + + +class VikingDBVectorFactory(AbstractVectorFactory): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> VikingDBVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix.lower() + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower() + dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.VIKINGDB, collection_name)) + + if dify_config.VIKINGDB_ACCESS_KEY is None: + raise ValueError("VIKINGDB_ACCESS_KEY should not be None") + if dify_config.VIKINGDB_SECRET_KEY is None: + raise ValueError("VIKINGDB_SECRET_KEY should not be None") + if dify_config.VIKINGDB_HOST is None: + raise ValueError("VIKINGDB_HOST should not be None") + if dify_config.VIKINGDB_REGION is None: + raise ValueError("VIKINGDB_REGION should not be None") + if dify_config.VIKINGDB_SCHEME is None: + raise ValueError("VIKINGDB_SCHEME should not be None") + return VikingDBVector( + collection_name=collection_name, + group_id=dataset.id, + config=VikingDBConfig( + access_key=dify_config.VIKINGDB_ACCESS_KEY, + secret_key=dify_config.VIKINGDB_SECRET_KEY, + host=dify_config.VIKINGDB_HOST, + region=dify_config.VIKINGDB_REGION, + scheme=dify_config.VIKINGDB_SCHEME, + connection_timeout=dify_config.VIKINGDB_CONNECTION_TIMEOUT, + socket_timeout=dify_config.VIKINGDB_SOCKET_TIMEOUT, + ), + ) diff --git a/api/core/rag/entities/context_entities.py b/api/core/rag/entities/context_entities.py index dde3beccf6..cd18ad081f 100644 --- a/api/core/rag/entities/context_entities.py +++ b/api/core/rag/entities/context_entities.py @@ -1,3 +1,5 @@ +from typing import Optional + from pydantic import BaseModel @@ -7,4 +9,4 @@ class DocumentContext(BaseModel): """ content: str - score: float + score: Optional[float] = None diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index ae61ba7112..633e41d5cf 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -231,6 +231,9 @@ class DatasetRetrieval: source["content"] = segment.content retrieval_resource_list.append(source) if hit_callback and 
retrieval_resource_list: + retrieval_resource_list = sorted(retrieval_resource_list, key=lambda x: x.get("score"), reverse=True) + for position, item in enumerate(retrieval_resource_list, start=1): + item["position"] = position hit_callback.return_retriever_resource_info(retrieval_resource_list) if document_context_list: document_context_list = sorted(document_context_list, key=lambda x: x.score, reverse=True) @@ -536,7 +539,7 @@ class DatasetRetrieval: continue # pass if dataset is not available - if dataset and dataset.available_document_count == 0: + if dataset and dataset.provider != "external" and dataset.available_document_count == 0: continue available_datasets.append(dataset) diff --git a/api/core/tools/provider/builtin/cogview/tools/cogview3.py b/api/core/tools/provider/builtin/cogview/tools/cogview3.py index 085084ca38..12b4173fa4 100644 --- a/api/core/tools/provider/builtin/cogview/tools/cogview3.py +++ b/api/core/tools/provider/builtin/cogview/tools/cogview3.py @@ -1,7 +1,8 @@ import random from typing import Any, Union -from core.model_runtime.model_providers.zhipuai.zhipuai_sdk._client import ZhipuAI +from zhipuai import ZhipuAI + from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool.builtin_tool import BuiltinTool diff --git a/api/core/tools/provider/builtin/jina/jina.yaml b/api/core/tools/provider/builtin/jina/jina.yaml index 346175c41f..af3ca23ffa 100644 --- a/api/core/tools/provider/builtin/jina/jina.yaml +++ b/api/core/tools/provider/builtin/jina/jina.yaml @@ -6,9 +6,9 @@ identity: zh_Hans: Jina AI pt_BR: Jina AI description: - en_US: Convert any URL to an LLM-friendly input or perform searches on the web for grounding information. Experience improved output for your agent and RAG systems at no cost. - zh_Hans: 将任何URL转换为LLM易读的输入或在网页上搜索引擎上搜索引擎。 - pt_BR: Converte qualquer URL em uma entrada LLm-fácil de ler ou realize pesquisas na web para obter informação de grounding. Tenha uma experiência melhor para seu agente e sistemas RAG sem custo. + en_US: Your Search Foundation, Supercharged! + zh_Hans: 您的搜索底座,从此不同! + pt_BR: Your Search Foundation, Supercharged! icon: icon.svg tags: - search diff --git a/api/core/tools/provider/builtin/vanna/tools/vanna.py b/api/core/tools/provider/builtin/vanna/tools/vanna.py index c90d766e48..2443991d57 100644 --- a/api/core/tools/provider/builtin/vanna/tools/vanna.py +++ b/api/core/tools/provider/builtin/vanna/tools/vanna.py @@ -111,9 +111,10 @@ class VannaTool(BuiltinTool): # with "visualize" set to True (default behavior) leads to remote code execution. 
# Affected versions: <= 0.5.5 ######################################################################################### - generate_chart = False - # generate_chart = tool_parameters.get("generate_chart", True) - res = vn.ask(prompt, False, True, generate_chart) + allow_llm_to_see_data = tool_parameters.get("allow_llm_to_see_data", False) + res = vn.ask( + prompt, print_results=False, auto_train=True, visualize=False, allow_llm_to_see_data=allow_llm_to_see_data + ) result = [] diff --git a/api/core/tools/provider/builtin/vanna/tools/vanna.yaml b/api/core/tools/provider/builtin/vanna/tools/vanna.yaml index ae2eae94c4..12ca8a862e 100644 --- a/api/core/tools/provider/builtin/vanna/tools/vanna.yaml +++ b/api/core/tools/provider/builtin/vanna/tools/vanna.yaml @@ -200,14 +200,14 @@ parameters: en_US: If enabled, it will attempt to train on the metadata of that database zh_Hans: 是否自动从数据库获取元数据来训练 form: form - - name: generate_chart + - name: allow_llm_to_see_data type: boolean required: false - default: True + default: false label: - en_US: Generate Charts - zh_Hans: 生成图表 + en_US: Whether to allow the LLM to see the data + zh_Hans: 是否允许LLM查看数据 human_description: - en_US: Generate Charts - zh_Hans: 是否生成图表 + en_US: Whether to allow the LLM to see the data + zh_Hans: 是否允许LLM查看数据 form: form diff --git a/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py index 8dc60408c9..987f94a350 100644 --- a/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py @@ -1,10 +1,12 @@ from pydantic import BaseModel, Field from core.rag.datasource.retrieval_service import RetrievalService +from core.rag.models.document import Document as RetrievalDocument from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.tools.tool.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool from extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment +from services.external_knowledge_service import ExternalDatasetService default_retrieval_model = { "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, @@ -53,97 +55,137 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): for hit_callback in self.hit_callbacks: hit_callback.on_query(query, dataset.id) - - # get retrieval model , if the model is not setting , using default - retrieval_model = dataset.retrieval_model or default_retrieval_model - if dataset.indexing_technique == "economy": - # use keyword table query - documents = RetrievalService.retrieve( - retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=self.top_k + if dataset.provider == "external": + results = [] + external_documents = ExternalDatasetService.fetch_external_knowledge_retrieval( + tenant_id=dataset.tenant_id, + dataset_id=dataset.id, + query=query, + external_retrieval_parameters=dataset.retrieval_model, ) - return str("\n".join([document.page_content for document in documents])) - else: - if self.top_k > 0: - # retrieval source - documents = RetrievalService.retrieve( - retrieval_method=retrieval_model.get("search_method", "semantic_search"), - dataset_id=dataset.id, - query=query, - top_k=self.top_k, - score_threshold=retrieval_model.get("score_threshold", 0.0) - if retrieval_model["score_threshold_enabled"] - else 0.0, - reranking_model=retrieval_model.get("reranking_model", None) - if retrieval_model["reranking_enable"] - else None, - 
reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model", - weights=retrieval_model.get("weights", None), + for external_document in external_documents: + document = RetrievalDocument( + page_content=external_document.get("content"), + metadata=external_document.get("metadata"), + provider="external", ) - else: - documents = [] - + document.metadata["score"] = external_document.get("score") + document.metadata["title"] = external_document.get("title") + document.metadata["dataset_id"] = dataset.id + document.metadata["dataset_name"] = dataset.name + results.append(document) + # deal with external documents + context_list = [] + for position, item in enumerate(results, start=1): + source = { + "position": position, + "dataset_id": item.metadata.get("dataset_id"), + "dataset_name": item.metadata.get("dataset_name"), + "document_name": item.metadata.get("title"), + "data_source_type": "external", + "retriever_from": self.retriever_from, + "score": item.metadata.get("score"), + "title": item.metadata.get("title"), + "content": item.page_content, + } + context_list.append(source) for hit_callback in self.hit_callbacks: - hit_callback.on_tool_end(documents) - document_score_list = {} - if dataset.indexing_technique != "economy": - for item in documents: - if item.metadata.get("score"): - document_score_list[item.metadata["doc_id"]] = item.metadata["score"] - document_context_list = [] - index_node_ids = [document.metadata["doc_id"] for document in documents] - segments = DocumentSegment.query.filter( - DocumentSegment.dataset_id == self.dataset_id, - DocumentSegment.completed_at.isnot(None), - DocumentSegment.status == "completed", - DocumentSegment.enabled == True, - DocumentSegment.index_node_id.in_(index_node_ids), - ).all() + hit_callback.return_retriever_resource_info(context_list) - if segments: - index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)} - sorted_segments = sorted( - segments, key=lambda segment: index_node_id_to_position.get(segment.index_node_id, float("inf")) + return str("\n".join([item.page_content for item in results])) + else: + # get retrieval model , if the model is not setting , using default + retrieval_model = dataset.retrieval_model or default_retrieval_model + if dataset.indexing_technique == "economy": + # use keyword table query + documents = RetrievalService.retrieve( + retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=self.top_k ) - for segment in sorted_segments: - if segment.answer: - document_context_list.append(f"question:{segment.get_sign_content()} answer:{segment.answer}") - else: - document_context_list.append(segment.get_sign_content()) - if self.return_resource: - context_list = [] - resource_number = 1 + return str("\n".join([document.page_content for document in documents])) + else: + if self.top_k > 0: + # retrieval source + documents = RetrievalService.retrieve( + retrieval_method=retrieval_model.get("search_method", "semantic_search"), + dataset_id=dataset.id, + query=query, + top_k=self.top_k, + score_threshold=retrieval_model.get("score_threshold", 0.0) + if retrieval_model["score_threshold_enabled"] + else 0.0, + reranking_model=retrieval_model.get("reranking_model", None) + if retrieval_model["reranking_enable"] + else None, + reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model", + weights=retrieval_model.get("weights", None), + ) + else: + documents = [] + + for hit_callback in self.hit_callbacks: + hit_callback.on_tool_end(documents) + 
document_score_list = {} + if dataset.indexing_technique != "economy": + for item in documents: + if item.metadata.get("score"): + document_score_list[item.metadata["doc_id"]] = item.metadata["score"] + document_context_list = [] + index_node_ids = [document.metadata["doc_id"] for document in documents] + segments = DocumentSegment.query.filter( + DocumentSegment.dataset_id == self.dataset_id, + DocumentSegment.completed_at.isnot(None), + DocumentSegment.status == "completed", + DocumentSegment.enabled == True, + DocumentSegment.index_node_id.in_(index_node_ids), + ).all() + + if segments: + index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)} + sorted_segments = sorted( + segments, key=lambda segment: index_node_id_to_position.get(segment.index_node_id, float("inf")) + ) for segment in sorted_segments: - context = {} - document = Document.query.filter( - Document.id == segment.document_id, - Document.enabled == True, - Document.archived == False, - ).first() - if dataset and document: - source = { - "position": resource_number, - "dataset_id": dataset.id, - "dataset_name": dataset.name, - "document_id": document.id, - "document_name": document.name, - "data_source_type": document.data_source_type, - "segment_id": segment.id, - "retriever_from": self.retriever_from, - "score": document_score_list.get(segment.index_node_id, None), - } - if self.retriever_from == "dev": - source["hit_count"] = segment.hit_count - source["word_count"] = segment.word_count - source["segment_position"] = segment.position - source["index_node_hash"] = segment.index_node_hash - if segment.answer: - source["content"] = f"question:{segment.content} \nanswer:{segment.answer}" - else: - source["content"] = segment.content - context_list.append(source) - resource_number += 1 + if segment.answer: + document_context_list.append( + f"question:{segment.get_sign_content()} answer:{segment.answer}" + ) + else: + document_context_list.append(segment.get_sign_content()) + if self.return_resource: + context_list = [] + resource_number = 1 + for segment in sorted_segments: + context = {} + document = Document.query.filter( + Document.id == segment.document_id, + Document.enabled == True, + Document.archived == False, + ).first() + if dataset and document: + source = { + "position": resource_number, + "dataset_id": dataset.id, + "dataset_name": dataset.name, + "document_id": document.id, + "document_name": document.name, + "data_source_type": document.data_source_type, + "segment_id": segment.id, + "retriever_from": self.retriever_from, + "score": document_score_list.get(segment.index_node_id, None), + } + if self.retriever_from == "dev": + source["hit_count"] = segment.hit_count + source["word_count"] = segment.word_count + source["segment_position"] = segment.position + source["index_node_hash"] = segment.index_node_hash + if segment.answer: + source["content"] = f"question:{segment.content} \nanswer:{segment.answer}" + else: + source["content"] = segment.content + context_list.append(source) + resource_number += 1 - for hit_callback in self.hit_callbacks: - hit_callback.return_retriever_resource_info(context_list) + for hit_callback in self.hit_callbacks: + hit_callback.return_retriever_resource_info(context_list) - return str("\n".join(document_context_list)) + return str("\n".join(document_context_list)) diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index 0d801d36c4..5867a11bb3 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py 
@@ -288,7 +288,7 @@ class ApiBasedToolSchemaParser:
     @staticmethod
     def auto_parse_to_tool_bundle(
-        content: str, extra_info: Optional[dict], warning: Optional[dict]
+        content: str, extra_info: Optional[dict] = None, warning: Optional[dict] = None
     ) -> tuple[list[ApiToolBundle], str]:
         """
         auto parse to tool bundle
diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py
index 0caf99a963..0b3e9bd6a8 100644
--- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py
+++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py
@@ -79,8 +79,9 @@ class KnowledgeRetrievalNode(BaseNode):
 
         results = (
             db.session.query(Dataset)
-            .join(subquery, Dataset.id == subquery.c.dataset_id)
+            .outerjoin(subquery, Dataset.id == subquery.c.dataset_id)
             .filter(Dataset.tenant_id == self.tenant_id, Dataset.id.in_(dataset_ids))
+            .filter((subquery.c.available_document_count > 0) | (Dataset.provider == "external"))
             .all()
         )
 
@@ -121,10 +122,13 @@ class KnowledgeRetrievalNode(BaseNode):
             )
         elif node_data.retrieval_mode == DatasetRetrieveConfigEntity.RetrieveStrategy.MULTIPLE.value:
             if node_data.multiple_retrieval_config.reranking_mode == "reranking_model":
-                reranking_model = {
-                    "reranking_provider_name": node_data.multiple_retrieval_config.reranking_model.provider,
-                    "reranking_model_name": node_data.multiple_retrieval_config.reranking_model.model,
-                }
+                if node_data.multiple_retrieval_config.reranking_model:
+                    reranking_model = {
+                        "reranking_provider_name": node_data.multiple_retrieval_config.reranking_model.provider,
+                        "reranking_model_name": node_data.multiple_retrieval_config.reranking_model.model,
+                    }
+                else:
+                    reranking_model = None
                 weights = None
             elif node_data.multiple_retrieval_config.reranking_mode == "weighted_score":
                 reranking_model = None
diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py
index be57b633be..f90629262d 100644
--- a/api/extensions/ext_storage.py
+++ b/api/extensions/ext_storage.py
@@ -56,6 +56,10 @@ class Storage:
                 from extensions.storage.volcengine_tos_storage import VolcengineTosStorage
 
                 return VolcengineTosStorage
+            case StorageType.SUPABASE:
+                from extensions.storage.supabase_storage import SupabaseStorage
+
+                return SupabaseStorage
             case StorageType.LOCAL | _:
                 from extensions.storage.local_fs_storage import LocalFsStorage
 
diff --git a/api/extensions/storage/aws_s3_storage.py b/api/extensions/storage/aws_s3_storage.py
index fede683aa7..38f823763f 100644
--- a/api/extensions/storage/aws_s3_storage.py
+++ b/api/extensions/storage/aws_s3_storage.py
@@ -1,3 +1,4 @@
+import logging
 from collections.abc import Generator
 from contextlib import closing
 
@@ -8,6 +9,8 @@ from flask import Flask
 
 from extensions.storage.base_storage import BaseStorage
 
+logger = logging.getLogger(__name__)
+
 
 class AwsS3Storage(BaseStorage):
     """Implementation for Amazon Web Services S3 storage."""
@@ -17,9 +20,14 @@ class AwsS3Storage(BaseStorage):
         app_config = self.app.config
         self.bucket_name = app_config.get("S3_BUCKET_NAME")
         if app_config.get("S3_USE_AWS_MANAGED_IAM"):
+            logger.info("Using AWS managed IAM role for S3")
+
             session = boto3.Session()
-            self.client = session.client("s3")
+            region_name = app_config.get("S3_REGION")
+            self.client = session.client(service_name="s3", region_name=region_name)
         else:
+            logger.info("Using ak and sk for S3")
+
             self.client = boto3.client(
                 "s3",
                 aws_secret_access_key=app_config.get("S3_SECRET_KEY"),
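The aws_s3_storage.py hunk above makes the managed-IAM path honor the configured S3 region instead of falling back to boto3's default resolution, and logs which credential mode is active. A minimal sketch of the two construction paths; the flag, region, and keys below are placeholders for the corresponding app config values:

import boto3

use_managed_iam = True  # stand-in for the S3_USE_AWS_MANAGED_IAM flag

if use_managed_iam:
    # Credentials come from the attached IAM role (instance profile, ECS/EKS
    # task role, etc.); only the region needs to be pinned explicitly.
    session = boto3.Session()
    client = session.client(service_name="s3", region_name="us-east-1")  # placeholder region
else:
    # Static keys, as read from the app config in the hunk above.
    client = boto3.client(
        "s3",
        aws_access_key_id="AKIA...",  # placeholder
        aws_secret_access_key="...",  # placeholder
        region_name="us-east-1",      # placeholder
    )

print(client.meta.region_name)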
diff --git a/api/extensions/storage/storage_type.py b/api/extensions/storage/storage_type.py
index e494a520a2..415bf251f6 100644
--- a/api/extensions/storage/storage_type.py
+++ b/api/extensions/storage/storage_type.py
@@ -12,3 +12,4 @@ class StorageType(str, Enum):
     S3 = "s3"
     TENCENT_COS = "tencent-cos"
     VOLCENGINE_TOS = "volcengine-tos"
+    SUPABASE = "supabase"
diff --git a/api/extensions/storage/supabase_storage.py b/api/extensions/storage/supabase_storage.py
new file mode 100644
index 0000000000..1e399f87c8
--- /dev/null
+++ b/api/extensions/storage/supabase_storage.py
@@ -0,0 +1,60 @@
+import io
+from collections.abc import Generator
+from pathlib import Path
+
+from flask import Flask
+from supabase import Client
+
+from extensions.storage.base_storage import BaseStorage
+
+
+class SupabaseStorage(BaseStorage):
+    """Implementation for Supabase storage."""
+
+    def __init__(self, app: Flask):
+        super().__init__(app)
+        app_config = self.app.config
+        self.bucket_name = app_config.get("SUPABASE_BUCKET_NAME")
+        self.client = Client(
+            supabase_url=app_config.get("SUPABASE_URL"), supabase_key=app_config.get("SUPABASE_API_KEY")
+        )
+        self.create_bucket(
+            id=app_config.get("SUPABASE_BUCKET_NAME"), bucket_name=app_config.get("SUPABASE_BUCKET_NAME")
+        )
+
+    def create_bucket(self, id, bucket_name):
+        if not self.bucket_exists():
+            self.client.storage.create_bucket(id=id, name=bucket_name)
+
+    def save(self, filename, data):
+        self.client.storage.from_(self.bucket_name).upload(filename, data)
+
+    def load_once(self, filename: str) -> bytes:
+        content = self.client.storage.from_(self.bucket_name).download(filename)
+        return content
+
+    def load_stream(self, filename: str) -> Generator:
+        def generate(filename: str = filename) -> Generator:
+            result = self.client.storage.from_(self.bucket_name).download(filename)
+            byte_stream = io.BytesIO(result)
+            while chunk := byte_stream.read(4096):  # Read in chunks of 4KB
+                yield chunk
+
+        return generate()
+
+    def download(self, filename, target_filepath):
+        result = self.client.storage.from_(self.bucket_name).download(filename)
+        Path(target_filepath).write_bytes(result)
+
+    def exists(self, filename):
+        result = self.client.storage.from_(self.bucket_name).list(filename)
+        if len(result) > 0:
+            return True
+        return False
+
+    def delete(self, filename):
+        self.client.storage.from_(self.bucket_name).remove(filename)
+
+    def bucket_exists(self):
+        buckets = self.client.storage.list_buckets()
+        return any(bucket.name == self.bucket_name for bucket in buckets)
diff --git a/api/libs/helper.py b/api/libs/helper.py
index 9c3a1ff04d..d8a8e7f411 100644
--- a/api/libs/helper.py
+++ b/api/libs/helper.py
@@ -162,7 +162,7 @@ def generate_string(n):
     return result
 
 
-def get_remote_ip(request) -> str:
+def extract_remote_ip(request) -> str:
     if request.headers.get("CF-Connecting-IP"):
         return request.headers.get("Cf-Connecting-Ip")
     elif request.headers.getlist("X-Forwarded-For"):
diff --git a/api/libs/oauth_data_source.py b/api/libs/oauth_data_source.py
index 05a73b09b7..e747ea97ad 100644
--- a/api/libs/oauth_data_source.py
+++ b/api/libs/oauth_data_source.py
@@ -1,3 +1,4 @@
+import datetime
 import urllib.parse
 
 import requests
@@ -69,6 +70,7 @@ class NotionOAuth(OAuthDataSource):
         if data_source_binding:
             data_source_binding.source_info = source_info
             data_source_binding.disabled = False
+            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         else:
             new_data_source_binding = DataSourceOauthBinding(
@@ -104,6 +106,7 @@ class
NotionOAuth(OAuthDataSource): if data_source_binding: data_source_binding.source_info = source_info data_source_binding.disabled = False + data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: new_data_source_binding = DataSourceOauthBinding( @@ -138,6 +141,7 @@ class NotionOAuth(OAuthDataSource): } data_source_binding.source_info = new_source_info data_source_binding.disabled = False + data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() else: raise ValueError("Data source binding not found") diff --git a/api/poetry.lock b/api/poetry.lock index 6b96350ffd..f1c5d949fe 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -293,13 +293,13 @@ alibabacloud-tea = "*" [[package]] name = "alibabacloud-tea" -version = "0.3.10" +version = "0.4.0" description = "The tea module of alibabaCloud Python SDK." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "alibabacloud-tea-0.3.10.tar.gz", hash = "sha256:bcf972416af5d8b5e671078c2ec20296dbc792e85e68acd685730a0a016afd2a"}, - {file = "alibabacloud_tea-0.3.10-py3-none-any.whl", hash = "sha256:9136f302a3baea8a1528f500bf5d47c3727b827a09b5c14b283ca53578e30082"}, + {file = "alibabacloud-tea-0.4.0.tar.gz", hash = "sha256:bdf72d747723bab190331b3c8593109fe2807504469bc0147f78c8c4945ed396"}, + {file = "alibabacloud_tea-0.4.0-py3-none-any.whl", hash = "sha256:59fae5765e6654f884e130233df6fb61ca0fbe01a29ed0755a1cf099a3d4d863"}, ] [package.dependencies] @@ -364,12 +364,12 @@ alibabacloud-tea = ">=0.0.1" [[package]] name = "aliyun-python-sdk-core" -version = "2.15.2" +version = "2.16.0" description = "The core module of Aliyun Python SDK." optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "aliyun-python-sdk-core-2.15.2.tar.gz", hash = "sha256:54f66a53e193c61c5e16ea4505a0cab43543f8ad2ef22833f69c4d5e5151c17d"}, + {file = "aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9"}, ] [package.dependencies] @@ -698,13 +698,13 @@ msrest = ">=0.6.21" [[package]] name = "azure-storage-file-share" -version = "12.18.0" +version = "12.19.0" description = "Microsoft Azure Azure File Share Storage Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_storage_file_share-12.18.0-py3-none-any.whl", hash = "sha256:23ca35206a0cb8af0decd9d1363d0ad8ab31584a5e55cfc64528d7192ff748a9"}, - {file = "azure_storage_file_share-12.18.0.tar.gz", hash = "sha256:0a81daee5e13598accde3c73b1aeccc6edfdcec5e957bf40150c0622fb95dc76"}, + {file = "azure_storage_file_share-12.19.0-py3-none-any.whl", hash = "sha256:eac6cf1a454aba58af4e6ba450b36d16aa1d0c49679fb64ea8756bb896698c5b"}, + {file = "azure_storage_file_share-12.19.0.tar.gz", hash = "sha256:ea7a4174dc6c52f50ac8c30f228159fcc3675d1f8ba771b8d0efcbc310740278"}, ] [package.dependencies] @@ -844,13 +844,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.34" +version = "1.35.38" description = "Low-level, data-driven core of boto 3." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.35.34-py3-none-any.whl", hash = "sha256:ccb0fe397b11b81c9abc0c87029d17298e17bf658d8db5c0c5a551a12a207e7a"},
-    {file = "botocore-1.35.34.tar.gz", hash = "sha256:789b6501a3bb4a9591c1fe10da200cc315c1fa5df5ada19c720d8ef06439b3e3"},
+    {file = "botocore-1.35.38-py3-none-any.whl", hash = "sha256:2eb17d32fa2d3bb5d475132a83564d28e3acc2161534f24b75a54418a1d51359"},
+    {file = "botocore-1.35.38.tar.gz", hash = "sha256:55d9305c44e5ba29476df456120fa4fb919f03f066afa82f2ae400485e7465f4"},
 ]

 [package.dependencies]
@@ -1257,101 +1257,116 @@ files = [
 [[package]]
 name = "charset-normalizer"
-version = "3.3.2"
+version = "3.4.0"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 optional = false
 python-versions = ">=3.7.0"
 files = [
-    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
-    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
+    {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
+    {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
+    {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
+    {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"},
+    {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"},
+    {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
+    {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
+    {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
+    {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
 ]

 [[package]]
@@ -1613,128 +1628,6 @@ pandas = ["pandas"]
 sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
 tzlocal = ["tzlocal (>=4.0)"]

-[[package]]
-name = "clickhouse-driver"
-version = "0.2.9"
-description = "Python driver with native interface for ClickHouse"
-optional = false
-python-versions = "<4,>=3.7"
-files = [
-    {file = "clickhouse-driver-0.2.9.tar.gz", hash = "sha256:050ea4870ead993910b39e7fae965dc1c347b2e8191dcd977cd4b385f9e19f87"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ce04e9d0d0f39561f312d1ac1a8147bc9206e4267e1a23e20e0423ebac95534"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ae5c8931bf290b9d85582e7955b9aad7f19ff9954e48caa4f9a180ea4d01078"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e51792f3bd12c32cb15a907f12de3c9d264843f0bb33dce400e3966c9f09a3f"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42fc546c31e4a04c97b749769335a679c9044dc693fa7a93e38c97fd6727173d"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a383a403d185185c64e49edd6a19b2ec973c5adcb8ebff7ed2fc539a2cc65a5"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f05321a97e816afc75b3e4f9eda989848fecf14ecf1a91d0f22c04258123d1f7"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47e793846aac28442b6b1c6554e0731b848a5a7759a54aa2489997354efe4a"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:780e42a215d1ae2f6d695d74dd6f087781fb2fa51c508b58f79e68c24c5364e0"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9e28f1fe850675e173db586e9f1ac790e8f7edd507a4227cd54cd7445f8e75b6"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:125aae7f1308d3083dadbb3c78f828ae492e060f13e4007a0cf53a8169ed7b39"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2f3c4fbb61e75c62a1ab93a1070d362de4cb5682f82833b2c12deccb3bae888d"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dc03196a84e32d23b88b665be69afae98f57426f5fdf203e16715b756757961"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-win32.whl", hash = "sha256:25695d78a1d7ad6e221e800612eac08559f6182bf6dee0a220d08de7b612d993"},
-    {file = "clickhouse_driver-0.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:367acac95398d721a0a2a6cf87e93638c5588b79498a9848676ce7f182540a6c"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6af1c6cbc3481205503ab72a34aa76d6519249c904aa3f7a84b31e7b435555be"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48033803abd1100bfff6b9a1769d831b672cd3cda5147e0323b956fd1416d38d"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f202a58a540c85e47c31dabc8f84b6fe79dca5315c866450a538d58d6fa0571"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4df50fd84bfa4aa1eb7b52d48136066bfb64fabb7ceb62d4c318b45a296200b"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433a650571a0d7766eb6f402e8f5930222997686c2ee01ded22f1d8fd46af9d4"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232ee260475611cbf7adb554b81db6b5790b36e634fe2164f4ffcd2ca3e63a71"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:09049f7e71f15c9c9a03f597f77fc1f7b61ababd155c06c0d9e64d1453d945d7"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:424153d1d5f5a807f596a48cc88119f9fb3213ca7e38f57b8d15dcc964dd91f7"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4f078fd1cf19c4ca63b8d1e0803df665310c8d5b644c5b02bf2465e8d6ef8f55"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f138d939e26e767537f891170b69a55a88038919f5c10d8865b67b8777fe4848"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9aafabc7e32942f85dcb46f007f447ab69024831575df97cae28c6ed127654d1"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-win32.whl", hash = "sha256:935e16ebf1a1998d8493979d858821a755503c9b8af572d9c450173d4b88868c"},
-    {file = "clickhouse_driver-0.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:306b3102cba278b5dfec6f5f7dc8b78416c403901510475c74913345b56c9e42"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7a3e6b0a1eb218e3d870a94c76daaf65da46dca8f6888ea6542f94905c24d88"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d8e2888a857d8db3d98765a5ad23ab561241feaef68bbffc5a0bd9c142342"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85d50c011467f5ff6772c4059345968b854b72e07a0219030b7c3f68419eb7f7"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93b395c1370629ccce8fb3e14cd5be2646d227bd32018c21f753c543e9a7e96b"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dbcee870c60d9835e5dce1456ab6b9d807e6669246357f4b321ef747b90fa43"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fffa5a5f317b1ec92e406a30a008929054cf3164d2324a3c465d0a0330273bf8"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:476702740a279744badbd177ae1c4a2d089ec128bd676861219d1f92078e4530"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5cd6d95fab5ff80e9dc9baedc9a926f62f74072d42d5804388d63b63bec0bb63"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:05027d32d7cf3e46cb8d04f8c984745ae01bd1bc7b3579f9dadf9b3cca735697"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3d11831842250b4c1b26503a6e9c511fc03db096608b7c6af743818c421a3032"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:81b4b671b785ebb0b8aeabf2432e47072413d81db959eb8cfd8b6ab58c5799c6"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-win32.whl", hash = "sha256:e893bd4e014877174a59e032b0e99809c95ec61328a0e6bd9352c74a2f6111a8"},
-    {file = "clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:909205324089a9ee59bee7ecbfa94595435118cca310fd62efdf13f225aa2965"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f31d6e47dc2b0f367f598f5629147ed056d7216c1788e25190fcfbfa02e749"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed84179914b2b7bb434c2322a6e7fd83daa681c97a050450511b66d917a129bb"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67d1bf63efb4ba14ae6c6da99622e4a549e68fc3ee14d859bf611d8e6a61b3fa"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eed23ea41dd582d76f7a2ec7e09cbe5e9fec008f11a4799fa35ce44a3ebd283"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a654291132766efa2703058317749d7c69b69f02d89bac75703eaf7f775e20da"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c26c5ef16d0ef3cabc5bc03e827e01b0a4afb5b4eaf8850b7cf740cee04a1d4"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b57e83d7986d3cbda6096974a9510eb53cb33ad9072288c87c820ba5eee3370e"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:153cc03b36f22cbde55aa6a5bbe99072a025567a54c48b262eb0da15d8cd7c83"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:83a857d99192936091f495826ae97497cd1873af213b1e069d56369fb182ab8e"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb05a9bb22cbe9ad187ad268f86adf7e60df6083331fe59c01571b7b725212dd"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-win32.whl", hash = "sha256:3e282c5c25e32d96ed151e5460d2bf4ecb805ea64449197dd918e84e768016df"},
-    {file = "clickhouse_driver-0.2.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c46dccfb04a9afd61a1b0e60bfefceff917f76da2c863f9b36b39248496d5c77"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:612ca9028c718f362c97f552e63d313cf1a70a616ef8532ddb0effdaf12ebef9"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471b884d318e012f68d858476052742048918854f7dfe87d78e819f87a848ffb"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ee63c35e99da887eb035c8d6d9e64fd298a0efc1460395297dd5cc281a6912"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0819bb63d2c5025a1fb9589f57ef82602687cef11081d6dfa6f2ce44606a1772"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6680ee18870bca1fbab1736c8203a965efaec119ab4c37821ad99add248ee08"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:713c498741b54debd3a10a5529e70b6ed85ca33c3e8629e24ae5cd8160b5a5f2"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:730837b8f63941065c9c955c44286aef0987fb084ffb3f55bf1e4fe07df62269"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9f4e38b2ea09214c8e7848a19391009a18c56a3640e1ba1a606b9e57aeb63404"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:457f1d6639e0345b717ae603c79bd087a35361ce68c1c308d154b80b841e5e7d"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:49a55aeb8ea625a87965a96e361bbb1ad67d0931bfb2a575f899c1064e70c2da"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9230058d8c9b1a04079afae4650fb67745f0f1c39db335728f64d48bd2c19246"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8798258bd556542dd9c6b8ebe62f9c5110c9dcdf97c57fb077e7b8b6d6da0826"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-win32.whl", hash = "sha256:ce8e3f4be46bcc63555863f70ab0035202b082b37e6f16876ef50e7bc4b47056"},
-    {file = "clickhouse_driver-0.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:2d982959ff628255808d895a67493f2dab0c3a9bfc65eeda0f00c8ae9962a1b3"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a46b227fab4420566ed24ee70d90076226d16fcf09c6ad4d428717efcf536446"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7eaa2ce5ea08cf5fddebb8c274c450e102f329f9e6966b6cd85aa671c48e5552"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97f0083194d6e23b5ef6156ed0d5388c37847b298118199d7937ba26412a9e2"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6cab5cdbb0f8ee51d879d977b78f07068b585225ac656f3c081896c362e8f83"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdb1b011a53ee71539e9dc655f268b111bac484db300da92829ed59e910a8fd0"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf51bb761b281d20910b4b689c699ef98027845467daa5bb5dfdb53bd6ee404"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8ea462e3cebb121ff55002e9c8a9a0a3fd9b5bbbf688b4960f0a83c0172fb31"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:70bee21c245226ad0d637bf470472e2d487b86911b6d673a862127b934336ff4"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:253a3c223b944d691bf0abbd599f592ea3b36f0a71d2526833b1718f37eca5c2"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a6549b53fc5c403dc556cb39b2ae94d73f9b113daa00438a660bb1dd5380ae4d"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1c685cd4abe61af1c26279ff04b9f567eb4d6c1ec7fb265af7481b1f153043aa"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e25144219577491929d032a6c3ddd63c6cd7fa764af829a5637f798190d9b26"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-win32.whl", hash = "sha256:0b9925610d25405a8e6d83ff4f54fc2456a121adb0155999972f5edd6ba3efc8"},
-    {file = "clickhouse_driver-0.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:b243de483cfa02716053b0148d73558f4694f3c27b97fc1eaa97d7079563a14d"},
-    {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45a3d5b1d06750fd6a18c29b871494a2635670099ec7693e756a5885a4a70dbf"},
-    {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8415ffebd6ca9eef3024763abc450f8659f1716d015bd563c537d01c7fbc3569"},
-    {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace48db993aa4bd31c42de0fa8d38c94ad47405916d6b61f7a7168a48fb52ac1"},
-    {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b07123334fe143bfe6fa4e3d4b732d647d5fd2cfb9ec7f2f76104b46fe9d20c6"},
-    {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2af3efa73d296420ce6362789f5b1febf75d4aa159a479393f01549115509d5"},
-    {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baf57eede88d07a1eb04352d26fc58a4d97991ca3d8840f7c5d48691dec9f251"},
-    {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:275d0ccdab9c3571bdb3e9acfab4497930aa584ff2766b035bb2f854deaf8b82"},
-    {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:293da77bfcac3168fb35b27c242f97c1a05502435c0686ecbb8e2e4abcb3de26"},
-    {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d6c2e5830705e4eeef33070ca4d5a24dfa221f28f2f540e5e6842c26e70b10b"},
-    {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:11934bd78d97dd7e1a23a6222b5edd1e1b4d34e1ead5c846dc2b5c56fdc35ff5"},
-    {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b802b6f0fbdcc3ab81b87f09b694dde91ab049f44d1d2c08c3dc8ea9a5950cfa"},
-    {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7af871c5315eb829ecf4533c790461ea8f73b3bfd5f533b0467e479fdf6ddcfd"},
-    {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d577dd4867b9e26cf60590e1f500990c8701a6e3cfbb9e644f4d0c0fb607028"},
-    {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ed3dea2d1eca85fef5b8564ddd76dedb15a610c77d55d555b49d9f7c896b64b"},
-    {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:91ec96f2c48e5bdeac9eea43a9bc9cc19acb2d2c59df0a13d5520dfc32457605"},
-    {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7667ab423452754f36ba8fb41e006a46baace9c94e2aca2a745689b9f2753dfb"},
-    {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:653583b1f3b088d106f180d6f02c90917ecd669ec956b62903a05df4a7f44863"},
-    {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef3dd0cbdf2f0171caab90389af0ede068ec802bf46c6a77f14e6edc86671bc"},
-    {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11b1833ee8ff8d5df39a34a895e060b57bd81e05ea68822bc60476daff4ce1c8"},
-    {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a3195639e6393b9d4aafe736036881ff86b6be5855d4bf7d9f5c31637181ec3"},
-]
-
-[package.dependencies]
-pytz = "*"
-tzlocal = "*"
-
-[package.extras]
-lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"]
-numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"]
-zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"]
-
 [[package]]
 name = "cloudpickle"
 version = "2.2.1"
@@ -1995,43 +1888,38 @@ files = [

 [[package]]
 name = "cryptography"
-version = "42.0.8"
+version = "43.0.1"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
-    {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
-    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
-    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
-    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
-    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
-    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
-    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
-    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
-    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
-    {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
-    {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
-    {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
-    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
-    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
-    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
-    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
-    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
-    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
-    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
-    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
-    {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
-    {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
-    {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
-    {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
-    {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
-    {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
-    {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
-    {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
-    {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
-    {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
-    {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
+    {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"},
+    {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"},
+    {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"},
+    {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"},
+    {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"},
+    {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"},
+    {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"},
+    {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"},
+    {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"},
+    {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"},
+    {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"},
+    {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"},
+    {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"},
+    {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"},
+    {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"},
+    {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"},
+    {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"},
+    {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"},
+    {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"},
+    {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"},
+    {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"},
+    {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"},
+    {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"},
+    {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"},
+    {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"},
+    {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"},
+    {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"},
 ]

 [package.dependencies]
@@ -2044,7 +1932,7 @@ nox = ["nox"]
 pep8test = ["check-sdist", "click", "mypy", "ruff"]
 sdist = ["build"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
 test-randomorder = ["pytest-randomly"]

 [[package]]
@@ -2107,17 +1995,6 @@ dev = ["Sphinx (==5.3.0)", "bump2version (==1.0.1)", "coverage (>=6.2)", "datacl
 timedelta = ["pytimeparse (>=1.1.7)"]
 yaml = ["PyYAML (>=5.3)"]

-[[package]]
-name = "dataclasses"
-version = "0.6"
-description = "A backport of the dataclasses module for Python 3.6"
-optional = false
-python-versions = "*"
-files = [
-    {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"},
-    {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"},
-]
-
 [[package]]
 name = "dataclasses-json"
 version = "0.6.7"
@@ -2150,6 +2027,17 @@ packaging = ">=17.0"
 pandas = ">=0.24.2"
 pyarrow = ">=3.0.0"

+[[package]]
+name = "decorator"
+version = "5.1.1"
+description = "Decorators for Humans"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
+    {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
+]
+
 [[package]]
 name = "defusedxml"
 version = "0.7.1"
@@ -2178,6 +2066,20 @@ wrapt = ">=1.10,<2"

 [package.extras]
 dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]

+[[package]]
+name = "deprecation"
+version = "2.1.0"
+description = "A library to handle automated deprecations"
+optional = false
+python-versions = "*"
+files = [
+    {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"},
+    {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"},
+]
+
+[package.dependencies]
+packaging = "*"
+
 [[package]]
 name = "dill"
 version = "0.3.9"
@@ -2348,13 +2250,13 @@ files = [

 [[package]]
 name = "elastic-transport"
-version = "8.15.0"
+version = "8.15.1"
 description = "Transport classes and utilities shared among Python Elastic client libraries"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "elastic_transport-8.15.0-py3-none-any.whl", hash = "sha256:d7080d1dada2b4eee69e7574f9c17a76b42f2895eff428e562f94b0360e158c0"},
-    {file = "elastic_transport-8.15.0.tar.gz", hash = "sha256:85d62558f9baafb0868c801233a59b235e61d7b4804c28c2fadaa866b6766233"},
+    {file = "elastic_transport-8.15.1-py3-none-any.whl", hash = "sha256:b5e82ff1679d8c7705a03fd85c7f6ef85d6689721762d41228dd312e34f331fc"},
+    {file = "elastic_transport-8.15.1.tar.gz", hash = "sha256:9cac4ab5cf9402668cf305ae0b7d93ddc0c7b61461d6d1027850db6da9cc5742"},
 ]

 [package.dependencies]
@@ -2362,17 +2264,17 @@ certifi = "*"
 urllib3 = ">=1.26.2,<3"

 [package.extras]
-develop = ["aiohttp", "furo", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"]
+develop = ["aiohttp", "furo", "httpcore (<1.0.6)", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"]

 [[package]]
 name = "elasticsearch"
-version = "8.15.1"
+version = "8.14.0"
 description = "Python client for Elasticsearch"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 files = [
-    {file = "elasticsearch-8.15.1-py3-none-any.whl", hash = "sha256:02a0476e98768a30d7926335fc0d305c04fdb928eea1354c6e6040d8c2814569"},
-    {file = "elasticsearch-8.15.1.tar.gz", hash = "sha256:40c0d312f8adf8bdc81795bc16a0b546ddf544cb1f90e829a244e4780c4dbfd8"},
+    {file = "elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130"},
+    {file = "elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b"},
 ]

 [package.dependencies]
@@ -2380,10 +2282,7 @@ elastic-transport = ">=8.13,<9"

 [package.extras]
 async = ["aiohttp (>=3,<4)"]
-dev = ["aiohttp", "black", "build", "coverage", "isort", "jinja2", "mapbox-vector-tile", "nox", "numpy", "orjson", "pandas", "pyarrow", "pytest", "pytest-asyncio", "pytest-cov", "python-dateutil", "pyyaml (>=5.4)", "requests (>=2,<3)", "simsimd", "twine", "unasync"]
-docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme (>=2.0)"]
 orjson = ["orjson (>=3)"]
-pyarrow = ["pyarrow (>=1)"]
 requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"]
 vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"]

@@ -2582,6 +2481,24 @@ files = [
     {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
 ]

+[[package]]
+name = "flasgger"
+version = "0.9.7.1"
+description = "Extract swagger specs from your flask project"
+optional = false
+python-versions = "*"
+files = [
+    {file = "flasgger-0.9.7.1.tar.gz", hash = "sha256:ca098e10bfbb12f047acc6299cc70a33851943a746e550d86e65e60d4df245fb"},
+]
+
+[package.dependencies]
+Flask = ">=0.10"
+jsonschema = ">=3.0.1"
+mistune = "*"
+packaging = "*"
+PyYAML = ">=3.0"
+six = ">=1.10.0"
+
 [[package]]
 name = "flask"
 version = "3.0.3"
@@ -3110,6 +3027,20 @@ files = [
 docs = ["sphinx (>=4)", "sphinx-rtd-theme (>=1)"]
 tests = ["cython", "hypothesis", "mpmath", "pytest", "setuptools"]

+[[package]]
+name = "google"
+version = "3.0.0"
+description = "Python bindings to the Google search engine."
+optional = false
+python-versions = "*"
+files = [
+    {file = "google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935"},
+    {file = "google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe"},
+]
+
+[package.dependencies]
+beautifulsoup4 = "*"
+
 [[package]]
 name = "google-ai-generativelanguage"
 version = "0.6.9"
@@ -3463,6 +3394,21 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4

 [package.extras]
 grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]

+[[package]]
+name = "gotrue"
+version = "2.9.2"
+description = "Python Client Library for Supabase Auth"
+optional = false
+python-versions = "<4.0,>=3.8"
+files = [
+    {file = "gotrue-2.9.2-py3-none-any.whl", hash = "sha256:fcd5279e8f1cc630f3ac35af5485fe39f8030b23906776920d2c32a4e308cff4"},
+    {file = "gotrue-2.9.2.tar.gz", hash = "sha256:57b3245e916c5efbf19a21b1181011a903c1276bb1df2d847558f2f24f29abb2"},
+]
+
+[package.dependencies]
+httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
+pydantic = ">=1.10,<3"
+
 [[package]]
 name = "greenlet"
 version = "3.1.1"
@@ -4124,18 +4070,15 @@ files = [

 [[package]]
 name = "isodate"
-version = "0.6.1"
+version = "0.7.2"
 description = "An ISO 8601 date/time/duration parser and formatter"
 optional = false
-python-versions = "*"
+python-versions = ">=3.7"
 files = [
-    {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"},
-    {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"},
+    {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"},
+    {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"},
 ]

-[package.dependencies]
-six = "*"
-
 [[package]]
 name = "itsdangerous"
 version = "2.2.0"
@@ -4257,13 +4200,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-

 [[package]]
 name = "jsonschema-specifications"
-version = "2023.12.1"
+version = "2024.10.1"
 description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
-    {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
+    {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"},
+    {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"},
 ]

 [package.dependencies]
@@ -4483,13 +4426,13 @@ six = "*"

 [[package]]
 name = "langfuse"
-version = "2.51.3"
+version = "2.51.5"
 description = "A client library for accessing langfuse"
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langfuse-2.51.3-py3-none-any.whl", hash = "sha256:32aba050123656ec0c165583e1d33243cc7a14e7b8498ed3c9de808aa90306f1"},
-    {file = "langfuse-2.51.3.tar.gz", hash = "sha256:ccd2109556ee232db717abfb751ee8a3139f074db7d3e06a4f1a756c349fc5ba"},
+    {file = "langfuse-2.51.5-py3-none-any.whl", hash = "sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb"},
+    {file =
"langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b"}, ] [package.dependencies] @@ -4508,13 +4451,13 @@ openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.131" +version = "0.1.134" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.131-py3-none-any.whl", hash = "sha256:80c106b1c42307195cc0bb3a596472c41ef91b79d15bcee9938307800336c563"}, - {file = "langsmith-0.1.131.tar.gz", hash = "sha256:626101a3bf3ca481e5110d5155ace8aa066e4e9cc2fa7d96c8290ade0fbff797"}, + {file = "langsmith-0.1.134-py3-none-any.whl", hash = "sha256:ada98ad80ef38807725f32441a472da3dd28394010877751f48f458d3289da04"}, + {file = "langsmith-0.1.134.tar.gz", hash = "sha256:23abee3b508875a0e63c602afafffc02442a19cfd88f9daae05b3e9054fd6b61"}, ] [package.dependencies] @@ -4856,71 +4799,72 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.1" description = "Safely add untrusted strings to HTML/XML markup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = 
"sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-win32.whl", hash = "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-win32.whl", hash = "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5"}, + {file = 
"MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453"}, + {file = 
"MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-win32.whl", hash = "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b"}, + {file = "markupsafe-3.0.1.tar.gz", hash = "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344"}, ] [[package]] @@ -5010,13 +4954,23 @@ python-versions = ">=3.7" files = [ {file = "milvus_lite-2.4.10-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:fc4246d3ed7d1910847afce0c9ba18212e93a6e9b8406048436940578dfad5cb"}, {file = "milvus_lite-2.4.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:74a8e07c5e3b057df17fbb46913388e84df1dc403a200f4e423799a58184c800"}, - {file = "milvus_lite-2.4.10-py3-none-manylinux2014_aarch64.whl", hash = "sha256:240c7386b747bad696ecb5bd1f58d491e86b9d4b92dccee3315ed7256256eddc"}, {file = "milvus_lite-2.4.10-py3-none-manylinux2014_x86_64.whl", hash = "sha256:211d2e334a043f9282bdd9755f76b9b2d93b23bffa7af240919ffce6a8dfe325"}, ] [package.dependencies] tqdm = "*" +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = 
"sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + [[package]] name = "mmh3" version = "5.0.1" @@ -5635,19 +5589,19 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "oci" -version = "2.135.1" +version = "2.135.2" description = "Oracle Cloud Infrastructure Python SDK" optional = false python-versions = "*" files = [ - {file = "oci-2.135.1-py3-none-any.whl", hash = "sha256:249e24638faa2058b2fa9ab4917ff230f2a8b3b97cd11a261ce6c7f97c3ed2ff"}, - {file = "oci-2.135.1.tar.gz", hash = "sha256:bea8aaba19d3fed186ceaeedba6d46f1cdb3c6c7d93b39ab225a15cbd63f9ce8"}, + {file = "oci-2.135.2-py3-none-any.whl", hash = "sha256:5213319244e1c7f108bcb417322f33f01f043fd9636d4063574039f5fdf4e4f7"}, + {file = "oci-2.135.2.tar.gz", hash = "sha256:520f78983c5246eae80dd5ecfd05e3a565c8b98d02ef0c1b11ba1f61bcccb61d"}, ] [package.dependencies] certifi = "*" circuitbreaker = {version = ">=1.3.1,<3.0.0", markers = "python_version >= \"3.7\""} -cryptography = ">=3.2.1,<43.0.0" +cryptography = ">=3.2.1,<46.0.0" pyOpenSSL = ">=17.5.0,<25.0.0" python-dateutil = ">=2.5.3,<3.0.0" pytz = ">=2016.10" @@ -6277,13 +6231,13 @@ files = [ [[package]] name = "pgvecto-rs" -version = "0.2.1" +version = "0.2.2" description = "Python binding for pgvecto.rs" optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "pgvecto_rs-0.2.1-py3-none-any.whl", hash = "sha256:b3ee2c465219469ad537b3efea2916477c6c576b3d6fd4298980d0733d12bb27"}, - {file = "pgvecto_rs-0.2.1.tar.gz", hash = "sha256:07046eaad2c4f75745f76de9ba483541909f1c595aced8d3434224a4f933daca"}, + {file = "pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5"}, + {file = "pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b"}, ] [package.dependencies] @@ -6483,6 +6437,23 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +[[package]] +name = "postgrest" +version = "0.17.1" +description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." 
+optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "postgrest-0.17.1-py3-none-any.whl", hash = "sha256:ec1d00dc8532fe5ffb342cfc7c4e610a1e0e2272eb14f78f9b2b61094f9be510"}, + {file = "postgrest-0.17.1.tar.gz", hash = "sha256:e31d9977dbb80dc5f9fdd4d444014686606692dc4ddb9adc85639e56c6d54c92"}, +] + +[package.dependencies] +deprecation = ">=2.1.0,<3.0.0" +httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +pydantic = ">=1.9,<3.0" +strenum = ">=0.4.9,<0.5.0" + [[package]] name = "posthog" version = "3.7.0" @@ -6712,6 +6683,17 @@ files = [ {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + [[package]] name = "py-cpuinfo" version = "9.0.0" @@ -7089,6 +7071,22 @@ bulk-writer = ["azure-storage-blob", "minio (>=7.0.0)", "pyarrow (>=12.0.0)", "r dev = ["black", "grpcio (==1.62.2)", "grpcio-testing (==1.62.2)", "grpcio-tools (==1.62.2)", "pytest (>=5.3.4)", "pytest-cov (>=2.8.1)", "pytest-timeout (>=1.3.4)", "ruff (>0.4.0)"] model = ["milvus-model (>=0.1.0)"] +[[package]] +name = "pymochow" +version = "1.3.1" +description = "Python SDK for mochow" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pymochow-1.3.1-py3-none-any.whl", hash = "sha256:a7f3b34fd6ea5d1d8413650bb6678365aa148fc396ae945e4ccb4f2365a52327"}, + {file = "pymochow-1.3.1.tar.gz", hash = "sha256:1693d10cd0bb7bce45327890a90adafb503155922ccc029acb257699a73a20ba"}, +] + +[package.dependencies] +future = "*" +orjson = "*" +requests = "*" + [[package]] name = "pymysql" version = "1.1.1" @@ -7124,13 +7122,13 @@ test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pypandoc" -version = "1.13" +version = "1.14" description = "Thin wrapper for pandoc." 
optional = false python-versions = ">=3.6" files = [ - {file = "pypandoc-1.13-py3-none-any.whl", hash = "sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681"}, - {file = "pypandoc-1.13.tar.gz", hash = "sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e"}, + {file = "pypandoc-1.14-py3-none-any.whl", hash = "sha256:1315c7ad7fac7236dacf69a05b521ed2c3f1d0177f70e9b92bfffce6c023df22"}, + {file = "pypandoc-1.14.tar.gz", hash = "sha256:6b4c45f5f1b9fb5bb562079164806bdbbc3e837b5402bcf3f1139edc5730a197"}, ] [[package]] @@ -7783,6 +7781,23 @@ dev = ["coveralls", "m2r", "pycodestyle", "pyflakes", "pylint", "pytest", "pytes docs = ["m2r", "sphinx"] test = ["coveralls", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-benchmark", "pytest-cov"] +[[package]] +name = "realtime" +version = "2.0.2" +description = "" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "realtime-2.0.2-py3-none-any.whl", hash = "sha256:2634c915bc38807f2013f21e8bcc4d2f79870dfd81460ddb9393883d0489928a"}, + {file = "realtime-2.0.2.tar.gz", hash = "sha256:519da9325b3b8102139d51785013d592f6b2403d81fa21d838a0b0234723ed7d"}, +] + +[package.dependencies] +aiohttp = ">=3.10.2,<4.0.0" +python-dateutil = ">=2.8.1,<3.0.0" +typing-extensions = ">=4.12.2,<5.0.0" +websockets = ">=11,<13" + [[package]] name = "redis" version = "5.0.8" @@ -8021,6 +8036,21 @@ files = [ [package.dependencies] requests = "2.31.0" +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." +optional = false +python-versions = "*" +files = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "rich" version = "13.9.2" @@ -8195,13 +8225,13 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.2" +version = "0.10.3" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, + {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, + {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, ] [package.dependencies] @@ -8666,19 +8696,20 @@ files = [ [[package]] name = "simple-websocket" -version = "1.0.0" +version = "1.1.0" description = "Simple WebSocket server and client for Python" optional = false python-versions = ">=3.6" files = [ - {file = "simple-websocket-1.0.0.tar.gz", hash = "sha256:17d2c72f4a2bd85174a97e3e4c88b01c40c3f81b7b648b0cc3ce1305968928c8"}, - {file = "simple_websocket-1.0.0-py3-none-any.whl", hash = "sha256:1d5bf585e415eaa2083e2bcf02a3ecf91f9712e7b3e6b9fa0b461ad04e0837bc"}, + {file = "simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c"}, + {file = "simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4"}, ] [package.dependencies] wsproto = "*" [package.extras] +dev = ["flake8", "pytest", "pytest-cov", "tox"] docs = 
["sphinx"] [[package]] @@ -8855,6 +8886,38 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "storage3" +version = "0.8.1" +description = "Supabase Storage client for Python." +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "storage3-0.8.1-py3-none-any.whl", hash = "sha256:0b21205f43eaf0d1dd33bde6c6d0612f88524b7865f017d2ae9827e3f63d9cdc"}, + {file = "storage3-0.8.1.tar.gz", hash = "sha256:ea60b68b2221b3868ccc1a7f1294d57d0d9c51642cdc639d8115fe5d0adc8892"}, +] + +[package.dependencies] +httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +python-dateutil = ">=2.8.2,<3.0.0" +typing-extensions = ">=4.2.0,<5.0.0" + +[[package]] +name = "strenum" +version = "0.4.15" +description = "An Enum that inherits from str." +optional = false +python-versions = "*" +files = [ + {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"}, + {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"}, +] + +[package.extras] +docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"] +release = ["twine"] +test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] + [[package]] name = "strictyaml" version = "1.7.3" @@ -8869,6 +8932,40 @@ files = [ [package.dependencies] python-dateutil = ">=2.6.0" +[[package]] +name = "supabase" +version = "2.8.1" +description = "Supabase client for Python." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "supabase-2.8.1-py3-none-any.whl", hash = "sha256:dfa8bef89b54129093521d5bba2136ff765baf67cd76d8ad0aa4984d61a7815c"}, + {file = "supabase-2.8.1.tar.gz", hash = "sha256:711c70e6acd9e2ff48ca0dc0b1bb70c01c25378cc5189ec9f5ed9655b30bc41d"}, +] + +[package.dependencies] +gotrue = ">=2.7.0,<3.0.0" +httpx = ">=0.24,<0.28" +postgrest = ">=0.17.0,<0.18.0" +realtime = ">=2.0.0,<3.0.0" +storage3 = ">=0.8.0,<0.9.0" +supafunc = ">=0.6.0,<0.7.0" +typing-extensions = ">=4.12.2,<5.0.0" + +[[package]] +name = "supafunc" +version = "0.6.1" +description = "Library for Supabase Functions" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "supafunc-0.6.1-py3-none-any.whl", hash = "sha256:01aeeeb4bf429977664454a32c86418345140faf6d2e6eb0636d52e4547c5fbb"}, + {file = "supafunc-0.6.1.tar.gz", hash = "sha256:3c8761e3999336ccdb7550498a395fd08afc8469382f55ea56f7f640e5a909aa"}, +] + +[package.dependencies] +httpx = {version = ">=0.26,<0.28", extras = ["http2"]} + [[package]] name = "sympy" version = "1.13.3" @@ -8943,13 +9040,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tencentcloud-sdk-python-common" -version = "3.0.1244" +version = "3.0.1247" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-common-3.0.1244.tar.gz", hash = "sha256:51e895a4f380ead51de5ebcef990cfda17cf466b130b8d1be7a95c11158a66e2"}, - {file = "tencentcloud_sdk_python_common-3.0.1244-py2.py3-none-any.whl", hash = "sha256:a3275496740d7876386166e83331eab7ad66e70fe4216c96893e9697fdeb9848"}, + {file = "tencentcloud-sdk-python-common-3.0.1247.tar.gz", hash = "sha256:1467ac3eaaa5b5d299570ba781903debc4be32dbb3f0f39929a357531ab89170"}, + {file = "tencentcloud_sdk_python_common-3.0.1247-py2.py3-none-any.whl", hash = "sha256:9829d2299c85a2494d6d816247345e98abd2f936cd309e1f67847243f5235091"}, ] [package.dependencies] @@ 
-8957,17 +9054,17 @@ requests = ">=2.16.0" [[package]] name = "tencentcloud-sdk-python-hunyuan" -version = "3.0.1244" +version = "3.0.1247" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-hunyuan-3.0.1244.tar.gz", hash = "sha256:b43f6848ac464cb244f6b1a17c97b00cd36ea40e91a4fba9f7b8ac1e94ae600a"}, - {file = "tencentcloud_sdk_python_hunyuan-3.0.1244-py2.py3-none-any.whl", hash = "sha256:c2679f49d129dd5ec905cbac97884f6a872e50300ec7ba1ab4fe654d1458213f"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1247.tar.gz", hash = "sha256:85b7332ec55f891a3b4d776e6b30ee2a44cc08c70b689615805aadff6e424fdd"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1247-py2.py3-none-any.whl", hash = "sha256:69fdb886616e53ce02e848e5a1a8b36922db731457b07365f230ffb0aa472b5b"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1244" +tencentcloud-sdk-python-common = "3.0.1247" [[package]] name = "threadpoolctl" @@ -9419,23 +9516,6 @@ files = [ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] -[[package]] -name = "tzlocal" -version = "5.2" -description = "tzinfo object for the local timezone" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, - {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, -] - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - [[package]] name = "ujson" version = "5.10.0" @@ -9636,13 +9716,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.31.0" +version = "0.31.1" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.31.0-py3-none-any.whl", hash = "sha256:cac7be4dd4d891c363cd942160a7b02e69150dcbc7a36be04d5f4af4b17c8ced"}, - {file = "uvicorn-0.31.0.tar.gz", hash = "sha256:13bc21373d103859f68fe739608e2eb054a816dea79189bc3ca08ea89a275906"}, + {file = "uvicorn-0.31.1-py3-none-any.whl", hash = "sha256:adc42d9cac80cf3e51af97c1851648066841e7cfb6993a4ca8de29ac1548ed41"}, + {file = "uvicorn-0.31.1.tar.gz", hash = "sha256:f5167919867b161b7bcaf32646c6a94cdbd4c3aa2eb5c17d36bb9aa5cfd8c493"}, ] [package.dependencies] @@ -9717,19 +9797,20 @@ files = [ [[package]] name = "vanna" -version = "0.5.5" +version = "0.7.3" description = "Generate SQL queries from natural language" optional = false python-versions = ">=3.9" files = [ - {file = "vanna-0.5.5-py3-none-any.whl", hash = "sha256:e1a308b7127b9e98c2579c0e4178fc1475d891c498e4a0667cffa10df8891e73"}, - {file = "vanna-0.5.5.tar.gz", hash = "sha256:7d9bf188a635bb75e4f8db15f0e6dbe72a426784779485f087b2df0ce175e664"}, + {file = "vanna-0.7.3-py3-none-any.whl", hash = "sha256:82ba39e5d6c503d1c8cca60835ed401d20ec3a3da98d487f529901dcb30061d6"}, + {file = "vanna-0.7.3.tar.gz", hash = "sha256:4590dd94d2fe180b4efc7a83c867b73144ef58794018910dc226857cfb703077"}, ] [package.dependencies] -clickhouse_driver = {version = "*", optional = true, markers = "extra == \"clickhouse\""} +clickhouse_connect = {version = "*", optional = true, markers = "extra == \"clickhouse\""} db-dtypes = {version = "*", optional = true, markers = "extra == \"postgres\""} duckdb = {version = "*", optional = true, markers = "extra == \"duckdb\""} +flasgger = "*" flask = "*" flask-sock = "*" kaleido = "*" @@ -9743,17 +9824,20 @@ sqlparse = "*" tabulate = "*" [package.extras] -all = ["PyMySQL", "anthropic", "chromadb", "db-dtypes", "duckdb", "fastembed", "google-cloud-aiplatform", "google-cloud-bigquery", "google-generativeai", "httpx", "marqo", "mistralai", "ollama", "openai", "opensearch-dsl", "opensearch-py", "pinecone-client", "psycopg2-binary", "qdrant-client", "snowflake-connector-python", "transformers", "zhipuai"] +all = ["PyMySQL", "anthropic", "azure-common", "azure-identity", "azure-search-documents", "chromadb", "db-dtypes", "duckdb", "fastembed", "google-cloud-aiplatform", "google-cloud-bigquery", "google-generativeai", "httpx", "marqo", "mistralai (>=1.0.0)", "ollama", "openai", "opensearch-dsl", "opensearch-py", "pinecone-client", "psycopg2-binary", "pymilvus[model]", "qdrant-client", "qianfan", "snowflake-connector-python", "transformers", "weaviate-client", "zhipuai"] anthropic = ["anthropic"] +azuresearch = ["azure-common", "azure-identity", "azure-search-documents", "fastembed"] +bedrock = ["boto3", "botocore"] bigquery = ["google-cloud-bigquery"] chromadb = ["chromadb"] -clickhouse = ["clickhouse_driver"] +clickhouse = ["clickhouse_connect"] duckdb = ["duckdb"] gemini = ["google-generativeai"] google = ["google-cloud-aiplatform", "google-generativeai"] hf = ["transformers"] marqo = ["marqo"] -mistralai = ["mistralai"] +milvus = ["pymilvus[model]"] +mistralai = ["mistralai (>=1.0.0)"] mysql = ["PyMySQL"] ollama = ["httpx", "ollama"] openai = ["openai"] @@ -9761,9 +9845,11 @@ opensearch = ["opensearch-dsl", "opensearch-py"] pinecone = ["fastembed", "pinecone-client"] postgres = ["db-dtypes", "psycopg2-binary"] qdrant = ["fastembed", "qdrant-client"] +qianfan = ["qianfan"] snowflake = ["snowflake-connector-python"] test = ["tox"] vllm = ["vllm"] +weaviate = ["weaviate-client"] zhipuai = ["zhipuai"] [[package]] @@ -9777,6 
+9863,26 @@ files = [ {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, ] +[[package]] +name = "volcengine-compat" +version = "1.0.156" +description = "Be Compatible with the Volcengine SDK for Python, The version of package dependencies has been modified. like pycryptodome, pytz." +optional = false +python-versions = "*" +files = [ + {file = "volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5"}, + {file = "volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267"}, +] + +[package.dependencies] +google = ">=3.0.0" +protobuf = ">=3.18.3" +pycryptodome = ">=3.9.9" +pytz = ">=2020.5" +requests = ">=2.25.1" +retry = ">=0.9.2" +six = ">=1.0" + [[package]] name = "volcengine-python-sdk" version = "1.0.103" @@ -9954,97 +10060,83 @@ test = ["websockets"] [[package]] name = "websockets" -version = "13.1" +version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" files = [ - {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, - {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, - {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, - {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, - {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, - {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, - {file = 
"websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, - {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, - {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, - {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, - {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, - {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, - {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, - {file = 
"websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, - {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, - {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, - {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, - {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, - {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, - {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, - {file = 
"websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, - {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, - {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, - {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, - {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, - {file = 
"websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, - {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, - {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, - {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file 
= "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = 
"websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = 
"websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] [[package]] @@ -10233,13 +10325,13 @@ files = [ [[package]] name = "xmltodict" -version = "0.13.0" +version = "0.14.1" description = "Makes working with XML feel like you are working with JSON" optional = false -python-versions = ">=3.4" +python-versions = ">=3.6" files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, + {file = "xmltodict-0.14.1-py2.py3-none-any.whl", hash = "sha256:3ef4a7b71c08f19047fcbea572e1d7f4207ab269da1565b5d40e9823d3894e63"}, + {file = "xmltodict-0.14.1.tar.gz", hash = "sha256:338c8431e4fc554517651972d62f06958718f6262b04316917008e8fd677a6b0"}, ] [[package]] @@ -10377,20 +10469,21 @@ repair = ["scipy (>=1.6.3)"] [[package]] name = "zhipuai" -version = "1.0.7" +version = "2.1.5.20230904" description = "A SDK library for accessing big model apis from ZhipuAI" optional = false -python-versions = ">=3.6" +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "zhipuai-1.0.7-py3-none-any.whl", hash = "sha256:360c01b8c2698f366061452e86d5a36a5ff68a576ea33940da98e4806f232530"}, - {file = "zhipuai-1.0.7.tar.gz", hash = "sha256:b80f699543d83cce8648acf1ce32bc2725d1c1c443baffa5882abc2cc704d581"}, + {file = "zhipuai-2.1.5.20230904-py3-none-any.whl", hash = 
"sha256:8485ca452c2f07fea476fb0666abc8fbbdf1b2e4feeee46a3bb3c1a2b51efccd"}, + {file = "zhipuai-2.1.5.20230904.tar.gz", hash = "sha256:2c19dd796b12e2f19b93d8f9be6fd01e85d3320737a187ebf3c75a9806a7c2b5"}, ] [package.dependencies] -cachetools = "*" -dataclasses = "*" -PyJWT = "*" -requests = "*" +cachetools = ">=4.2.2" +httpx = ">=0.23.0" +pydantic = ">=1.9.0,<3.0" +pydantic-core = ">=2.14.6" +pyjwt = ">=2.8.0,<2.9.0" [[package]] name = "zipp" @@ -10431,54 +10524,57 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "7.0.3" +version = "7.1.0" description = "Interfaces for Python" optional = false python-versions = ">=3.8" files = [ - {file = "zope.interface-7.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b9369671a20b8d039b8e5a1a33abd12e089e319a3383b4cc0bf5c67bd05fe7b"}, - {file = "zope.interface-7.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db6237e8fa91ea4f34d7e2d16d74741187e9105a63bbb5686c61fea04cdbacca"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53d678bb1c3b784edbfb0adeebfeea6bf479f54da082854406a8f295d36f8386"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3aa8fcbb0d3c2be1bfd013a0f0acd636f6ed570c287743ae2bbd467ee967154d"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6195c3c03fef9f87c0dbee0b3b6451df6e056322463cf35bca9a088e564a3c58"}, - {file = "zope.interface-7.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:11fa1382c3efb34abf16becff8cb214b0b2e3144057c90611621f2d186b7e1b7"}, - {file = "zope.interface-7.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af94e429f9d57b36e71ef4e6865182090648aada0cb2d397ae2b3f7fc478493a"}, - {file = "zope.interface-7.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dd647fcd765030638577fe6984284e0ebba1a1008244c8a38824be096e37fe3"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bee1b722077d08721005e8da493ef3adf0b7908e0cd85cc7dc836ac117d6f32"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2545d6d7aac425d528cd9bf0d9e55fcd47ab7fd15f41a64b1c4bf4c6b24946dc"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d04b11ea47c9c369d66340dbe51e9031df2a0de97d68f442305ed7625ad6493"}, - {file = "zope.interface-7.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:064ade95cb54c840647205987c7b557f75d2b2f7d1a84bfab4cf81822ef6e7d1"}, - {file = "zope.interface-7.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3fcdc76d0cde1c09c37b7c6b0f8beba2d857d8417b055d4f47df9c34ec518bdd"}, - {file = "zope.interface-7.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d4b91821305c8d8f6e6207639abcbdaf186db682e521af7855d0bea3047c8ca"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35062d93bc49bd9b191331c897a96155ffdad10744ab812485b6bad5b588d7e4"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c96b3e6b0d4f6ddfec4e947130ec30bd2c7b19db6aa633777e46c8eecf1d6afd"}, - {file = 
"zope.interface-7.0.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0c151a6c204f3830237c59ee4770cc346868a7a1af6925e5e38650141a7f05"}, - {file = "zope.interface-7.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:3de1d553ce72868b77a7e9d598c9bff6d3816ad2b4cc81c04f9d8914603814f3"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab985c566a99cc5f73bc2741d93f1ed24a2cc9da3890144d37b9582965aff996"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d976fa7b5faf5396eb18ce6c132c98e05504b52b60784e3401f4ef0b2e66709b"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a207c6b2c58def5011768140861a73f5240f4f39800625072ba84e76c9da0b"}, - {file = "zope.interface-7.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:382d31d1e68877061daaa6499468e9eb38eb7625d4369b1615ac08d3860fe896"}, - {file = "zope.interface-7.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c4316a30e216f51acbd9fb318aa5af2e362b716596d82cbb92f9101c8f8d2e7"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e6e58078ad2799130c14a1d34ec89044ada0e1495329d72ee0407b9ae5100d"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799ef7a444aebbad5a145c3b34bff012b54453cddbde3332d47ca07225792ea4"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3b7ce6d46fb0e60897d62d1ff370790ce50a57d40a651db91a3dde74f73b738"}, - {file = "zope.interface-7.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:f418c88f09c3ba159b95a9d1cfcdbe58f208443abb1f3109f4b9b12fd60b187c"}, - {file = "zope.interface-7.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:84f8794bd59ca7d09d8fce43ae1b571be22f52748169d01a13d3ece8394d8b5b"}, - {file = "zope.interface-7.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d92920416f31786bc1b2f34cc4fc4263a35a407425319572cbf96b51e835cd3"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e5913ec718010dc0e7c215d79a9683b4990e7026828eedfda5268e74e73e11"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eeeb92cb7d95c45e726e3c1afe7707919370addae7ed14f614e22217a536958"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd32f30f40bfd8511b17666895831a51b532e93fc106bfa97f366589d3e4e0e"}, - {file = "zope.interface-7.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5112c530fa8aa2108a3196b9c2f078f5738c1c37cfc716970edc0df0414acda8"}, - {file = "zope.interface-7.0.3.tar.gz", hash = "sha256:cd2690d4b08ec9eaf47a85914fe513062b20da78d10d6d789a792c0b20307fb1"}, + {file = "zope.interface-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2bd9e9f366a5df08ebbdc159f8224904c1c5ce63893984abb76954e6fbe4381a"}, + {file = "zope.interface-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:661d5df403cd3c5b8699ac480fa7f58047a3253b029db690efa0c3cf209993ef"}, + {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:91b6c30689cfd87c8f264acb2fc16ad6b3c72caba2aec1bf189314cf1a84ca33"}, + {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b6a4924f5bad9fe21d99f66a07da60d75696a136162427951ec3cb223a5570d"}, + {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a3c00b35f6170be5454b45abe2719ea65919a2f09e8a6e7b1362312a872cd3"}, + {file = "zope.interface-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b936d61dbe29572fd2cfe13e30b925e5383bed1aba867692670f5a2a2eb7b4e9"}, + {file = "zope.interface-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ac20581fc6cd7c754f6dff0ae06fedb060fa0e9ea6309d8be8b2701d9ea51c4"}, + {file = "zope.interface-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:848b6fa92d7c8143646e64124ed46818a0049a24ecc517958c520081fd147685"}, + {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1ef1fdb6f014d5886b97e52b16d0f852364f447d2ab0f0c6027765777b6667"}, + {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bcff5c09d0215f42ba64b49205a278e44413d9bf9fa688fd9e42bfe472b5f4f"}, + {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07add15de0cc7e69917f7d286b64d54125c950aeb43efed7a5ea7172f000fbc1"}, + {file = "zope.interface-7.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:9940d5bc441f887c5f375ec62bcf7e7e495a2d5b1da97de1184a88fb567f06af"}, + {file = "zope.interface-7.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f245d039f72e6f802902375755846f5de1ee1e14c3e8736c078565599bcab621"}, + {file = "zope.interface-7.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6159e767d224d8f18deff634a1d3722e68d27488c357f62ebeb5f3e2f5288b1f"}, + {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e956b1fd7f3448dd5e00f273072e73e50dfafcb35e4227e6d5af208075593c9"}, + {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff115ef91c0eeac69cd92daeba36a9d8e14daee445b504eeea2b1c0b55821984"}, + {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec001798ab62c3fc5447162bf48496ae9fba02edc295a9e10a0b0c639a6452e"}, + {file = "zope.interface-7.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:124149e2d42067b9c6597f4dafdc7a0983d0163868f897b7bb5dc850b14f9a87"}, + {file = "zope.interface-7.1.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:9733a9a0f94ef53d7aa64661811b20875b5bc6039034c6e42fb9732170130573"}, + {file = "zope.interface-7.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5fcf379b875c610b5a41bc8a891841533f98de0520287d7f85e25386cd10d3e9"}, + {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a45b5af9f72c805ee668d1479480ca85169312211bed6ed18c343e39307d5f"}, + {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af4a12b459a273b0b34679a5c3dc5e34c1847c3dd14a628aa0668e19e638ea2"}, + {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:a735f82d2e3ed47ca01a20dfc4c779b966b16352650a8036ab3955aad151ed8a"}, + {file = "zope.interface-7.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:5501e772aff595e3c54266bc1bfc5858e8f38974ce413a8f1044aae0f32a83a3"}, + {file = "zope.interface-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec59fe53db7d32abb96c6d4efeed84aab4a7c38c62d7a901a9b20c09dd936e7a"}, + {file = "zope.interface-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e53c291debef523b09e1fe3dffe5f35dde164f1c603d77f770b88a1da34b7ed6"}, + {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:711eebc77f2092c6a8b304bad0b81a6ce3cf5490b25574e7309fbc07d881e3af"}, + {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a00ead2e24c76436e1b457a5132d87f83858330f6c923640b7ef82d668525d1"}, + {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e28ea0bc4b084fc93a483877653a033062435317082cdc6388dec3438309faf"}, + {file = "zope.interface-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:27cfb5205d68b12682b6e55ab8424662d96e8ead19550aad0796b08dd2c9a45e"}, + {file = "zope.interface-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e3e48f3dea21c147e1b10c132016cb79af1159facca9736d231694ef5a740a8"}, + {file = "zope.interface-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a99240b1d02dc469f6afbe7da1bf617645e60290c272968f4e53feec18d7dce8"}, + {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8a318162123eddbdf22fcc7b751288ce52e4ad096d3766ff1799244352449d"}, + {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7b25db127db3e6b597c5f74af60309c4ad65acd826f89609662f0dc33a54728"}, + {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a29ac607e970b5576547f0e3589ec156e04de17af42839eedcf478450687317"}, + {file = "zope.interface-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a14c9decf0eb61e0892631271d500c1e306c7b6901c998c7035e194d9150fdd1"}, + {file = "zope_interface-7.1.0.tar.gz", hash = "sha256:3f005869a1a05e368965adb2075f97f8ee9a26c61898a9e52a9764d93774f237"}, ] [package.dependencies] setuptools = "*" [package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] -test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] -testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +docs = ["Sphinx", "furo", "repoze.sphinx.autointerface"] +test = ["coverage[toml]", "zope.event", "zope.testing"] +testing = ["coverage[toml]", "zope.event", "zope.testing"] [[package]] name = "zstandard" @@ -10595,4 +10691,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "34ba8efcc67da342036ef075b693f59fdc67d246f40b857c9c1bd6f80c7283bd" +content-hash = "75a7e7eab36b9386c11a3e9808da28102ad20a43a0e8ae08c37594ecf50da02b" diff --git a/api/pyproject.toml b/api/pyproject.toml index 956d4dbd11..cc85ec3af6 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -182,7 +182,7 @@ websocket-client = "~1.7.0" werkzeug = "~3.0.1" xinference-client = "0.15.2" yarl = "~1.9.4" -zhipuai = "1.0.7" +zhipuai = "~2.1.5" # Before adding new dependency, consider place it in alphabet order (a-z) and suitable group. 
############################################################ @@ -207,10 +207,9 @@ matplotlib = "~3.8.2" newspaper3k = "0.2.8" nltk = "3.8.1" numexpr = "~2.9.0" -opensearch-py = "2.4.0" qrcode = "~7.4.2" twilio = "~9.0.4" -vanna = { version = "0.5.5", extras = ["postgres", "mysql", "clickhouse", "duckdb"] } +vanna = { version = "0.7.3", extras = ["postgres", "mysql", "clickhouse", "duckdb"] } wikipedia = "1.4.0" yfinance = "~0.2.40" @@ -225,6 +224,7 @@ cos-python-sdk-v5 = "1.9.30" esdk-obs-python = "3.24.6.1" google-cloud-storage = "2.16.0" oss2 = "2.18.5" +supabase = "~2.8.1" tos = "~2.7.1" ############################################################ @@ -236,14 +236,17 @@ alibabacloud_gpdb20160503 = "~3.8.0" alibabacloud_tea_openapi = "~0.3.9" chromadb = "0.5.1" clickhouse-connect = "~0.7.16" -elasticsearch = "~8.15.1" +elasticsearch = "8.14.0" +opensearch-py = "2.4.0" oracledb = "~2.2.1" pgvecto-rs = { version = "~0.2.1", extras = ['sqlalchemy'] } pgvector = "0.2.5" pymilvus = "~2.4.4" +pymochow = "1.3.1" qdrant-client = "1.7.3" tcvectordb = "1.3.2" tidb-vector = "0.0.9" +volcengine-compat = "~1.0.156" weaviate-client = "~3.21.0" ############################################################ diff --git a/api/services/account_service.py b/api/services/account_service.py index 05b505f8a6..eda6011aef 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -7,6 +7,7 @@ from datetime import datetime, timedelta, timezone from hashlib import sha256 from typing import Any, Optional +from pydantic import BaseModel from sqlalchemy import func from werkzeug.exceptions import Unauthorized @@ -49,9 +50,39 @@ from tasks.mail_invite_member_task import send_invite_member_mail_task from tasks.mail_reset_password_task import send_reset_password_mail_task +class TokenPair(BaseModel): + access_token: str + refresh_token: str + + +REFRESH_TOKEN_PREFIX = "refresh_token:" +ACCOUNT_REFRESH_TOKEN_PREFIX = "account_refresh_token:" +REFRESH_TOKEN_EXPIRY = timedelta(days=30) + + class AccountService: reset_password_rate_limiter = RateLimiter(prefix="reset_password_rate_limit", max_attempts=5, time_window=60 * 60) + @staticmethod + def _get_refresh_token_key(refresh_token: str) -> str: + return f"{REFRESH_TOKEN_PREFIX}{refresh_token}" + + @staticmethod + def _get_account_refresh_token_key(account_id: str) -> str: + return f"{ACCOUNT_REFRESH_TOKEN_PREFIX}{account_id}" + + @staticmethod + def _store_refresh_token(refresh_token: str, account_id: str) -> None: + redis_client.setex(AccountService._get_refresh_token_key(refresh_token), REFRESH_TOKEN_EXPIRY, account_id) + redis_client.setex( + AccountService._get_account_refresh_token_key(account_id), REFRESH_TOKEN_EXPIRY, refresh_token + ) + + @staticmethod + def _delete_refresh_token(refresh_token: str, account_id: str) -> None: + redis_client.delete(AccountService._get_refresh_token_key(refresh_token)) + redis_client.delete(AccountService._get_account_refresh_token_key(account_id)) + @staticmethod def load_user(user_id: str) -> None | Account: account = Account.query.filter_by(id=user_id).first() @@ -61,9 +92,7 @@ class AccountService: if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}: raise Unauthorized("Account is banned or closed.") - current_tenant: TenantAccountJoin = TenantAccountJoin.query.filter_by( - account_id=account.id, current=True - ).first() + current_tenant = TenantAccountJoin.query.filter_by(account_id=account.id, current=True).first() if current_tenant: account.current_tenant_id = 
current_tenant.tenant_id else: @@ -84,10 +113,12 @@ class AccountService: return account @staticmethod - def get_account_jwt_token(account, *, exp: timedelta = timedelta(days=30)): + def get_account_jwt_token(account: Account) -> str: + exp_dt = datetime.now(timezone.utc) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES) + exp = int(exp_dt.timestamp()) payload = { "user_id": account.id, - "exp": datetime.now(timezone.utc).replace(tzinfo=None) + exp, + "exp": exp, "iss": dify_config.EDITION, "sub": "Console API Passport", } @@ -213,7 +244,7 @@ class AccountService: return account @staticmethod - def update_last_login(account: Account, *, ip_address: str) -> None: + def update_login_info(account: Account, *, ip_address: str) -> None: """Update last login time and ip""" account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None) account.last_login_ip = ip_address @@ -221,22 +252,45 @@ class AccountService: db.session.commit() @staticmethod - def login(account: Account, *, ip_address: Optional[str] = None): + def login(account: Account, *, ip_address: Optional[str] = None) -> TokenPair: if ip_address: - AccountService.update_last_login(account, ip_address=ip_address) - exp = timedelta(days=30) - token = AccountService.get_account_jwt_token(account, exp=exp) - redis_client.set(_get_login_cache_key(account_id=account.id, token=token), "1", ex=int(exp.total_seconds())) - return token + AccountService.update_login_info(account=account, ip_address=ip_address) + + access_token = AccountService.get_account_jwt_token(account=account) + refresh_token = _generate_refresh_token() + + AccountService._store_refresh_token(refresh_token, account.id) + + return TokenPair(access_token=access_token, refresh_token=refresh_token) @staticmethod - def logout(*, account: Account, token: str): - redis_client.delete(_get_login_cache_key(account_id=account.id, token=token)) + def logout(*, account: Account) -> None: + refresh_token = redis_client.get(AccountService._get_account_refresh_token_key(account.id)) + if refresh_token: + AccountService._delete_refresh_token(refresh_token.decode("utf-8"), account.id) @staticmethod - def load_logged_in_account(*, account_id: str, token: str): - if not redis_client.get(_get_login_cache_key(account_id=account_id, token=token)): - return None + def refresh_token(refresh_token: str) -> TokenPair: + # Verify the refresh token + account_id = redis_client.get(AccountService._get_refresh_token_key(refresh_token)) + if not account_id: + raise ValueError("Invalid refresh token") + + account = AccountService.load_user(account_id.decode("utf-8")) + if not account: + raise ValueError("Invalid account") + + # Generate new access token and refresh token + new_access_token = AccountService.get_account_jwt_token(account) + new_refresh_token = _generate_refresh_token() + + AccountService._delete_refresh_token(refresh_token, account.id) + AccountService._store_refresh_token(new_refresh_token, account.id) + + return TokenPair(access_token=new_access_token, refresh_token=new_refresh_token) + + @staticmethod + def load_logged_in_account(*, account_id: str): return AccountService.load_user(account_id) @classmethod @@ -258,10 +312,6 @@ class AccountService: return TokenManager.get_token_data(token, "reset_password") -def _get_login_cache_key(*, account_id: str, token: str): - return f"account_login:{account_id}:{token}" - - class TenantService: @staticmethod def create_tenant(name: str) -> Tenant: @@ -698,3 +748,8 @@ class RegisterService: invitation = json.loads(data) return 
invitation + + +def _generate_refresh_token(length: int = 64): + token = secrets.token_hex(length) + return token diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index b8f80a9f77..ede8764086 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -8,6 +8,7 @@ from typing import Optional from flask_login import current_user from sqlalchemy import func +from werkzeug.exceptions import NotFound from configs import dify_config from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError @@ -233,6 +234,7 @@ class DatasetService: dataset.name = data.get("name", dataset.name) dataset.description = data.get("description", "") external_knowledge_id = data.get("external_knowledge_id", None) + dataset.permission = data.get("permission") db.session.add(dataset) if not external_knowledge_id: raise ValueError("External knowledge id is required.") @@ -975,6 +977,8 @@ class DocumentService: ): DatasetService.check_dataset_model_setting(dataset) document = DocumentService.get_document(dataset.id, document_data["original_document_id"]) + if document is None: + raise NotFound("Document not found") if document.display_status != "available": raise ValueError("Document is not available") # update document name diff --git a/api/services/enterprise/base.py b/api/services/enterprise/base.py index ddee52164b..7d4fdfd2d0 100644 --- a/api/services/enterprise/base.py +++ b/api/services/enterprise/base.py @@ -7,11 +7,16 @@ class EnterpriseRequest: base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL") secret_key = os.environ.get("ENTERPRISE_API_SECRET_KEY", "ENTERPRISE_API_SECRET_KEY") + proxies = { + "http": None, + "https": None, + } + @classmethod def send_request(cls, method, endpoint, json=None, params=None): headers = {"Content-Type": "application/json", "Enterprise-Api-Secret-Key": cls.secret_key} url = f"{cls.base_url}{endpoint}" - response = requests.request(method, url, json=json, params=params, headers=headers) + response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies) return response.json() diff --git a/api/services/knowledge_service.py b/api/services/knowledge_service.py new file mode 100644 index 0000000000..02fe1d19bc --- /dev/null +++ b/api/services/knowledge_service.py @@ -0,0 +1,45 @@ +import boto3 + +from configs import dify_config + + +class ExternalDatasetTestService: + # this service is only for internal testing + @staticmethod + def knowledge_retrieval(retrieval_setting: dict, query: str, knowledge_id: str): + # get bedrock client + client = boto3.client( + "bedrock-agent-runtime", + aws_secret_access_key=dify_config.AWS_SECRET_ACCESS_KEY, + aws_access_key_id=dify_config.AWS_ACCESS_KEY_ID, + # example: us-east-1 + region_name="us-east-1", + ) + # fetch external knowledge retrieval + response = client.retrieve( + knowledgeBaseId=knowledge_id, + retrievalConfiguration={ + "vectorSearchConfiguration": { + "numberOfResults": retrieval_setting.get("top_k"), + "overrideSearchType": "HYBRID", + } + }, + retrievalQuery={"text": query}, + ) + # parse response + results = [] + if response.get("ResponseMetadata") and response.get("ResponseMetadata").get("HTTPStatusCode") == 200: + if response.get("retrievalResults"): + retrieval_results = response.get("retrievalResults") + for retrieval_result in retrieval_results: + # filter out results with score less than threshold + if retrieval_result.get("score") < retrieval_setting.get("score_threshold", 0.0): + continue + result 
= { + "metadata": retrieval_result.get("metadata"), + "score": retrieval_result.get("score"), + "title": retrieval_result.get("metadata").get("x-amz-bedrock-kb-source-uri"), + "content": retrieval_result.get("content").get("text"), + } + results.append(result) + return {"records": results} diff --git a/api/services/recommend_app/__init__.py b/api/services/recommend_app/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/services/recommend_app/buildin/__init__.py b/api/services/recommend_app/buildin/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/services/recommend_app/buildin/buildin_retrieval.py b/api/services/recommend_app/buildin/buildin_retrieval.py new file mode 100644 index 0000000000..4704d533a9 --- /dev/null +++ b/api/services/recommend_app/buildin/buildin_retrieval.py @@ -0,0 +1,64 @@ +import json +from os import path +from pathlib import Path +from typing import Optional + +from flask import current_app + +from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase +from services.recommend_app.recommend_app_type import RecommendAppType + + +class BuildInRecommendAppRetrieval(RecommendAppRetrievalBase): + """ + Retrieval recommended app from buildin, the location is constants/recommended_apps.json + """ + + builtin_data: Optional[dict] = None + + def get_type(self) -> str: + return RecommendAppType.BUILDIN + + def get_recommended_apps_and_categories(self, language: str) -> dict: + result = self.fetch_recommended_apps_from_builtin(language) + return result + + def get_recommend_app_detail(self, app_id: str): + result = self.fetch_recommended_app_detail_from_builtin(app_id) + return result + + @classmethod + def _get_builtin_data(cls) -> dict: + """ + Get builtin data. + :return: + """ + if cls.builtin_data: + return cls.builtin_data + + root_path = current_app.root_path + cls.builtin_data = json.loads( + Path(path.join(root_path, "constants", "recommended_apps.json")).read_text(encoding="utf-8") + ) + + return cls.builtin_data + + @classmethod + def fetch_recommended_apps_from_builtin(cls, language: str) -> dict: + """ + Fetch recommended apps from builtin. + :param language: language + :return: + """ + builtin_data = cls._get_builtin_data() + return builtin_data.get("recommended_apps", {}).get(language) + + @classmethod + def fetch_recommended_app_detail_from_builtin(cls, app_id: str) -> Optional[dict]: + """ + Fetch recommended app detail from builtin. 
+ :param app_id: App ID + :return: + """ + builtin_data = cls._get_builtin_data() + return builtin_data.get("app_details", {}).get(app_id) diff --git a/api/services/recommend_app/database/__init__.py b/api/services/recommend_app/database/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/services/recommend_app/database/database_retrieval.py b/api/services/recommend_app/database/database_retrieval.py new file mode 100644 index 0000000000..995d3755bb --- /dev/null +++ b/api/services/recommend_app/database/database_retrieval.py @@ -0,0 +1,111 @@ +from typing import Optional + +from constants.languages import languages +from extensions.ext_database import db +from models.model import App, RecommendedApp +from services.app_dsl_service import AppDslService +from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase +from services.recommend_app.recommend_app_type import RecommendAppType + + +class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase): + """ + Retrieval recommended app from database + """ + + def get_recommended_apps_and_categories(self, language: str) -> dict: + result = self.fetch_recommended_apps_from_db(language) + return result + + def get_recommend_app_detail(self, app_id: str): + result = self.fetch_recommended_app_detail_from_db(app_id) + return result + + def get_type(self) -> str: + return RecommendAppType.DATABASE + + @classmethod + def fetch_recommended_apps_from_db(cls, language: str) -> dict: + """ + Fetch recommended apps from db. + :param language: language + :return: + """ + recommended_apps = ( + db.session.query(RecommendedApp) + .filter(RecommendedApp.is_listed == True, RecommendedApp.language == language) + .all() + ) + + if len(recommended_apps) == 0: + recommended_apps = ( + db.session.query(RecommendedApp) + .filter(RecommendedApp.is_listed == True, RecommendedApp.language == languages[0]) + .all() + ) + + categories = set() + recommended_apps_result = [] + for recommended_app in recommended_apps: + app = recommended_app.app + if not app or not app.is_public: + continue + + site = app.site + if not site: + continue + + recommended_app_result = { + "id": recommended_app.id, + "app": { + "id": app.id, + "name": app.name, + "mode": app.mode, + "icon": app.icon, + "icon_background": app.icon_background, + }, + "app_id": recommended_app.app_id, + "description": site.description, + "copyright": site.copyright, + "privacy_policy": site.privacy_policy, + "custom_disclaimer": site.custom_disclaimer, + "category": recommended_app.category, + "position": recommended_app.position, + "is_listed": recommended_app.is_listed, + } + recommended_apps_result.append(recommended_app_result) + + categories.add(recommended_app.category) + + return {"recommended_apps": recommended_apps_result, "categories": sorted(categories)} + + @classmethod + def fetch_recommended_app_detail_from_db(cls, app_id: str) -> Optional[dict]: + """ + Fetch recommended app detail from db. 
+ :param app_id: App ID + :return: + """ + # is in public recommended list + recommended_app = ( + db.session.query(RecommendedApp) + .filter(RecommendedApp.is_listed == True, RecommendedApp.app_id == app_id) + .first() + ) + + if not recommended_app: + return None + + # get app detail + app_model = db.session.query(App).filter(App.id == app_id).first() + if not app_model or not app_model.is_public: + return None + + return { + "id": app_model.id, + "name": app_model.name, + "icon": app_model.icon, + "icon_background": app_model.icon_background, + "mode": app_model.mode, + "export_data": AppDslService.export_dsl(app_model=app_model), + } diff --git a/api/services/recommend_app/recommend_app_base.py b/api/services/recommend_app/recommend_app_base.py new file mode 100644 index 0000000000..00c037710e --- /dev/null +++ b/api/services/recommend_app/recommend_app_base.py @@ -0,0 +1,17 @@ +from abc import ABC, abstractmethod + + +class RecommendAppRetrievalBase(ABC): + """Interface for recommend app retrieval.""" + + @abstractmethod + def get_recommended_apps_and_categories(self, language: str) -> dict: + raise NotImplementedError + + @abstractmethod + def get_recommend_app_detail(self, app_id: str): + raise NotImplementedError + + @abstractmethod + def get_type(self) -> str: + raise NotImplementedError diff --git a/api/services/recommend_app/recommend_app_factory.py b/api/services/recommend_app/recommend_app_factory.py new file mode 100644 index 0000000000..e53667c0b0 --- /dev/null +++ b/api/services/recommend_app/recommend_app_factory.py @@ -0,0 +1,23 @@ +from services.recommend_app.buildin.buildin_retrieval import BuildInRecommendAppRetrieval +from services.recommend_app.database.database_retrieval import DatabaseRecommendAppRetrieval +from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase +from services.recommend_app.recommend_app_type import RecommendAppType +from services.recommend_app.remote.remote_retrieval import RemoteRecommendAppRetrieval + + +class RecommendAppRetrievalFactory: + @staticmethod + def get_recommend_app_factory(mode: str) -> type[RecommendAppRetrievalBase]: + match mode: + case RecommendAppType.REMOTE: + return RemoteRecommendAppRetrieval + case RecommendAppType.DATABASE: + return DatabaseRecommendAppRetrieval + case RecommendAppType.BUILDIN: + return BuildInRecommendAppRetrieval + case _: + raise ValueError(f"invalid fetch recommended apps mode: {mode}") + + @staticmethod + def get_buildin_recommend_app_retrieval(): + return BuildInRecommendAppRetrieval diff --git a/api/services/recommend_app/recommend_app_type.py b/api/services/recommend_app/recommend_app_type.py new file mode 100644 index 0000000000..7ea93b3f64 --- /dev/null +++ b/api/services/recommend_app/recommend_app_type.py @@ -0,0 +1,7 @@ +from enum import Enum + + +class RecommendAppType(str, Enum): + REMOTE = "remote" + BUILDIN = "builtin" + DATABASE = "db" diff --git a/api/services/recommend_app/remote/__init__.py b/api/services/recommend_app/remote/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/services/recommend_app/remote/remote_retrieval.py b/api/services/recommend_app/remote/remote_retrieval.py new file mode 100644 index 0000000000..b0607a2132 --- /dev/null +++ b/api/services/recommend_app/remote/remote_retrieval.py @@ -0,0 +1,71 @@ +import logging +from typing import Optional + +import requests + +from configs import dify_config +from services.recommend_app.buildin.buildin_retrieval import BuildInRecommendAppRetrieval +from 
services.recommend_app.recommend_app_base import RecommendAppRetrievalBase +from services.recommend_app.recommend_app_type import RecommendAppType + +logger = logging.getLogger(__name__) + + +class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase): + """ + Retrieval recommended app from dify official + """ + + def get_recommend_app_detail(self, app_id: str): + try: + result = self.fetch_recommended_app_detail_from_dify_official(app_id) + except Exception as e: + logger.warning(f"fetch recommended app detail from dify official failed: {e}, switch to built-in.") + result = BuildInRecommendAppRetrieval.fetch_recommended_app_detail_from_builtin(app_id) + return result + + def get_recommended_apps_and_categories(self, language: str) -> dict: + try: + result = self.fetch_recommended_apps_from_dify_official(language) + except Exception as e: + logger.warning(f"fetch recommended apps from dify official failed: {e}, switch to built-in.") + result = BuildInRecommendAppRetrieval.fetch_recommended_apps_from_builtin(language) + return result + + def get_type(self) -> str: + return RecommendAppType.REMOTE + + @classmethod + def fetch_recommended_app_detail_from_dify_official(cls, app_id: str) -> Optional[dict]: + """ + Fetch recommended app detail from dify official. + :param app_id: App ID + :return: + """ + domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN + url = f"{domain}/apps/{app_id}" + response = requests.get(url, timeout=(3, 10)) + if response.status_code != 200: + return None + + return response.json() + + @classmethod + def fetch_recommended_apps_from_dify_official(cls, language: str) -> dict: + """ + Fetch recommended apps from dify official. + :param language: language + :return: + """ + domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN + url = f"{domain}/apps?language={language}" + response = requests.get(url, timeout=(3, 10)) + if response.status_code != 200: + raise ValueError(f"fetch recommended apps failed, status code: {response.status_code}") + + result = response.json() + + if "categories" in result: + result["categories"] = sorted(result["categories"]) + + return result diff --git a/api/services/recommended_app_service.py b/api/services/recommended_app_service.py index daec8393d0..4660316fcf 100644 --- a/api/services/recommended_app_service.py +++ b/api/services/recommended_app_service.py @@ -1,24 +1,10 @@ -import json -import logging -from os import path -from pathlib import Path from typing import Optional -import requests -from flask import current_app - from configs import dify_config -from constants.languages import languages -from extensions.ext_database import db -from models.model import App, RecommendedApp -from services.app_dsl_service import AppDslService - -logger = logging.getLogger(__name__) +from services.recommend_app.recommend_app_factory import RecommendAppRetrievalFactory class RecommendedAppService: - builtin_data: Optional[dict] = None - @classmethod def get_recommended_apps_and_categories(cls, language: str) -> dict: """ @@ -27,109 +13,17 @@ class RecommendedAppService: :return: """ mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE - if mode == "remote": - try: - result = cls._fetch_recommended_apps_from_dify_official(language) - except Exception as e: - logger.warning(f"fetch recommended apps from dify official failed: {e}, switch to built-in.") - result = cls._fetch_recommended_apps_from_builtin(language) - elif mode == "db": - result = cls._fetch_recommended_apps_from_db(language) - elif mode == "builtin": - result = 
cls._fetch_recommended_apps_from_builtin(language) - else: - raise ValueError(f"invalid fetch recommended apps mode: {mode}") - + retrieval_instance = RecommendAppRetrievalFactory.get_recommend_app_factory(mode)() + result = retrieval_instance.get_recommended_apps_and_categories(language) if not result.get("recommended_apps") and language != "en-US": - result = cls._fetch_recommended_apps_from_builtin("en-US") - - return result - - @classmethod - def _fetch_recommended_apps_from_db(cls, language: str) -> dict: - """ - Fetch recommended apps from db. - :param language: language - :return: - """ - recommended_apps = ( - db.session.query(RecommendedApp) - .filter(RecommendedApp.is_listed == True, RecommendedApp.language == language) - .all() - ) - - if len(recommended_apps) == 0: - recommended_apps = ( - db.session.query(RecommendedApp) - .filter(RecommendedApp.is_listed == True, RecommendedApp.language == languages[0]) - .all() + result = ( + RecommendAppRetrievalFactory.get_buildin_recommend_app_retrieval().fetch_recommended_apps_from_builtin( + "en-US" + ) ) - categories = set() - recommended_apps_result = [] - for recommended_app in recommended_apps: - app = recommended_app.app - if not app or not app.is_public: - continue - - site = app.site - if not site: - continue - - recommended_app_result = { - "id": recommended_app.id, - "app": { - "id": app.id, - "name": app.name, - "mode": app.mode, - "icon": app.icon, - "icon_background": app.icon_background, - }, - "app_id": recommended_app.app_id, - "description": site.description, - "copyright": site.copyright, - "privacy_policy": site.privacy_policy, - "custom_disclaimer": site.custom_disclaimer, - "category": recommended_app.category, - "position": recommended_app.position, - "is_listed": recommended_app.is_listed, - } - recommended_apps_result.append(recommended_app_result) - - categories.add(recommended_app.category) # add category to categories - - return {"recommended_apps": recommended_apps_result, "categories": sorted(categories)} - - @classmethod - def _fetch_recommended_apps_from_dify_official(cls, language: str) -> dict: - """ - Fetch recommended apps from dify official. - :param language: language - :return: - """ - domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN - url = f"{domain}/apps?language={language}" - response = requests.get(url, timeout=(3, 10)) - if response.status_code != 200: - raise ValueError(f"fetch recommended apps failed, status code: {response.status_code}") - - result = response.json() - - if "categories" in result: - result["categories"] = sorted(result["categories"]) - return result - @classmethod - def _fetch_recommended_apps_from_builtin(cls, language: str) -> dict: - """ - Fetch recommended apps from builtin. 
- :param language: language - :return: - """ - builtin_data = cls._get_builtin_data() - return builtin_data.get("recommended_apps", {}).get(language) - @classmethod def get_recommend_app_detail(cls, app_id: str) -> Optional[dict]: """ @@ -138,117 +32,6 @@ class RecommendedAppService: :return: """ mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE - if mode == "remote": - try: - result = cls._fetch_recommended_app_detail_from_dify_official(app_id) - except Exception as e: - logger.warning(f"fetch recommended app detail from dify official failed: {e}, switch to built-in.") - result = cls._fetch_recommended_app_detail_from_builtin(app_id) - elif mode == "db": - result = cls._fetch_recommended_app_detail_from_db(app_id) - elif mode == "builtin": - result = cls._fetch_recommended_app_detail_from_builtin(app_id) - else: - raise ValueError(f"invalid fetch recommended app detail mode: {mode}") - + retrieval_instance = RecommendAppRetrievalFactory.get_recommend_app_factory(mode)() + result = retrieval_instance.get_recommend_app_detail(app_id) return result - - @classmethod - def _fetch_recommended_app_detail_from_dify_official(cls, app_id: str) -> Optional[dict]: - """ - Fetch recommended app detail from dify official. - :param app_id: App ID - :return: - """ - domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN - url = f"{domain}/apps/{app_id}" - response = requests.get(url, timeout=(3, 10)) - if response.status_code != 200: - return None - - return response.json() - - @classmethod - def _fetch_recommended_app_detail_from_db(cls, app_id: str) -> Optional[dict]: - """ - Fetch recommended app detail from db. - :param app_id: App ID - :return: - """ - # is in public recommended list - recommended_app = ( - db.session.query(RecommendedApp) - .filter(RecommendedApp.is_listed == True, RecommendedApp.app_id == app_id) - .first() - ) - - if not recommended_app: - return None - - # get app detail - app_model = db.session.query(App).filter(App.id == app_id).first() - if not app_model or not app_model.is_public: - return None - - return { - "id": app_model.id, - "name": app_model.name, - "icon": app_model.icon, - "icon_background": app_model.icon_background, - "mode": app_model.mode, - "export_data": AppDslService.export_dsl(app_model=app_model), - } - - @classmethod - def _fetch_recommended_app_detail_from_builtin(cls, app_id: str) -> Optional[dict]: - """ - Fetch recommended app detail from builtin. - :param app_id: App ID - :return: - """ - builtin_data = cls._get_builtin_data() - return builtin_data.get("app_details", {}).get(app_id) - - @classmethod - def _get_builtin_data(cls) -> dict: - """ - Get builtin data. 
- :return: - """ - if cls.builtin_data: - return cls.builtin_data - - root_path = current_app.root_path - cls.builtin_data = json.loads( - Path(path.join(root_path, "constants", "recommended_apps.json")).read_text(encoding="utf-8") - ) - - return cls.builtin_data - - @classmethod - def fetch_all_recommended_apps_and_export_datas(cls): - """ - Fetch all recommended apps and export datas - :return: - """ - templates = {"recommended_apps": {}, "app_details": {}} - for language in languages: - try: - result = cls._fetch_recommended_apps_from_dify_official(language) - except Exception as e: - logger.warning(f"fetch recommended apps from dify official failed: {e}, skip.") - continue - - templates["recommended_apps"][language] = result - - for recommended_app in result.get("recommended_apps"): - app_id = recommended_app.get("app_id") - - # get app detail - app_detail = cls._fetch_recommended_app_detail_from_dify_official(app_id) - if not app_detail: - continue - - templates["app_details"][app_id] = app_detail - - return templates diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index 9f5298a506..257c6cf52b 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -80,7 +80,9 @@ class ApiToolManageService: raise ValueError(f"invalid schema: {str(e)}") @staticmethod - def convert_schema_to_tool_bundles(schema: str, extra_info: Optional[dict] = None) -> list[ApiToolBundle]: + def convert_schema_to_tool_bundles( + schema: str, extra_info: Optional[dict] = None + ) -> tuple[list[ApiToolBundle], str]: """ convert schema to tool bundles diff --git a/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py b/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py index 518cee1a3a..64f2884c4b 100644 --- a/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py +++ b/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py @@ -2,36 +2,23 @@ from typing import Any import toml -ALL_DEPENDENCY_GROUP_NAMES = [ - # default main group - "", - # required groups - "indirect", - "storage", - "tools", - "vdb", - # optional groups - "dev", - "lint", -] - def load_api_poetry_configs() -> dict[str, Any]: pyproject_toml = toml.load("api/pyproject.toml") - return pyproject_toml.get("tool").get("poetry") + return pyproject_toml["tool"]["poetry"] -def load_dependency_groups() -> dict[str, dict[str, dict[str, Any]]]: - poetry_configs = load_api_poetry_configs() - group_name_to_dependencies = { - group_name: (poetry_configs.get("group").get(group_name) if group_name else poetry_configs).get("dependencies") - for group_name in ALL_DEPENDENCY_GROUP_NAMES - } - return group_name_to_dependencies +def load_all_dependency_groups() -> dict[str, dict[str, dict[str, Any]]]: + configs = load_api_poetry_configs() + configs_by_group = {"main": configs} + for group_name in configs["group"]: + configs_by_group[group_name] = configs["group"][group_name] + dependencies_by_group = {group_name: base["dependencies"] for group_name, base in configs_by_group.items()} + return dependencies_by_group def test_group_dependencies_sorted(): - for group_name, dependencies in load_dependency_groups().items(): + for group_name, dependencies in load_all_dependency_groups().items(): dependency_names = list(dependencies.keys()) expected_dependency_names = sorted(set(dependency_names)) section = f"tool.poetry.group.{group_name}.dependencies" if group_name else "tool.poetry.dependencies" @@ -42,17 +29,18 
@@ def test_group_dependencies_sorted(): def test_group_dependencies_version_operator(): - for group_name, dependencies in load_dependency_groups().items(): + for group_name, dependencies in load_all_dependency_groups().items(): for dependency_name, specification in dependencies.items(): - version_spec = specification if isinstance(specification, str) else specification.get("version") + version_spec = specification if isinstance(specification, str) else specification["version"] assert not version_spec.startswith("^"), ( - f"'^' is not allowed in dependency version," f" but found in '{dependency_name} = {version_spec}'" + f"Please replace '{dependency_name} = {version_spec}' with '{dependency_name} = ~{version_spec[1:]}'; " + f"the '^' operator is too permissive and is not allowed in version specifications." ) def test_duplicated_dependency_crossing_groups(): all_dependency_names: list[str] = [] - for dependencies in load_dependency_groups().values(): + for dependencies in load_all_dependency_groups().values(): dependency_names = list(dependencies.keys()) all_dependency_names.extend(dependency_names) expected_all_dependency_names = set(all_dependency_names) diff --git a/api/tests/integration_tests/model_runtime/tongyi/test_rerank.py b/api/tests/integration_tests/model_runtime/tongyi/test_rerank.py new file mode 100644 index 0000000000..2dcfb92c63 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/tongyi/test_rerank.py @@ -0,0 +1,40 @@ +import os + +import dashscope +import pytest + +from core.model_runtime.entities.rerank_entities import RerankResult +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.tongyi.rerank.rerank import GTERerankModel + + +def test_validate_credentials(): + model = GTERerankModel() + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials(model="gte-rerank", credentials={"dashscope_api_key": "invalid_key"}) + + model.validate_credentials( + model="gte-rerank", credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")} + ) + + +def test_invoke_model(): + model = GTERerankModel() + + result = model.invoke( + model=dashscope.TextReRank.Models.gte_rerank, + credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")}, + query="什么是文本排序模型", + docs=[ + "文本排序模型广泛用于搜索引擎和推荐系统中,它们根据文本相关性对候选文本进行排序", + "量子计算是计算科学的一个前沿领域", + "预训练语言模型的发展给文本排序模型带来了新的进展", + ], + score_threshold=0.7, + ) + + assert isinstance(result, RerankResult) + assert len(result.docs) == 1 + assert result.docs[0].index == 0 + assert result.docs[0].score >= 0.7 diff --git a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py new file mode 100644 index 0000000000..a8eaf42b7d --- /dev/null +++ b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py @@ -0,0 +1,154 @@ +import os + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from pymochow import MochowClient +from pymochow.model.database import Database +from pymochow.model.enum import IndexState, IndexType, MetricType, ReadConsistency, TableState +from pymochow.model.schema import HNSWParams, VectorIndex +from pymochow.model.table import Table +from requests.adapters import HTTPAdapter + + +class MockBaiduVectorDBClass: + def mock_vector_db_client( + self, + config=None, + adapter: HTTPAdapter = None, + ): + self._conn = None + self._config = None + + def list_databases(self, config=None) -> list[Database]: + return [ + Database( + conn=self._conn, +
database_name="dify", + config=self._config, + ) + ] + + def create_database(self, database_name: str, config=None) -> Database: + return Database(conn=self._conn, database_name=database_name, config=config) + + def list_table(self, config=None) -> list[Table]: + return [] + + def drop_table(self, table_name: str, config=None): + return {"code": 0, "msg": "Success"} + + def create_table( + self, + table_name: str, + replication: int, + partition: int, + schema, + enable_dynamic_field=False, + description: str = "", + config=None, + ) -> Table: + return Table(self, table_name, replication, partition, schema, enable_dynamic_field, description, config) + + def describe_table(self, table_name: str, config=None) -> Table: + return Table( + self, + table_name, + 3, + 1, + None, + enable_dynamic_field=False, + description="table for dify", + config=config, + state=TableState.NORMAL, + ) + + def upsert(self, rows, config=None): + return {"code": 0, "msg": "operation success", "affectedCount": 1} + + def rebuild_index(self, index_name: str, config=None): + return {"code": 0, "msg": "Success"} + + def describe_index(self, index_name: str, config=None): + return VectorIndex( + index_name=index_name, + index_type=IndexType.HNSW, + field="vector", + metric_type=MetricType.L2, + params=HNSWParams(m=16, efconstruction=200), + auto_build=False, + state=IndexState.NORMAL, + ) + + def query( + self, + primary_key, + partition_key=None, + projections=None, + retrieve_vector=False, + read_consistency=ReadConsistency.EVENTUAL, + config=None, + ): + return { + "row": { + "id": "doc_id_001", + "vector": [0.23432432, 0.8923744, 0.89238432], + "text": "text", + "metadata": {"doc_id": "doc_id_001"}, + }, + "code": 0, + "msg": "Success", + } + + def delete(self, primary_key=None, partition_key=None, filter=None, config=None): + return {"code": 0, "msg": "Success"} + + def search( + self, + anns, + partition_key=None, + projections=None, + retrieve_vector=False, + read_consistency=ReadConsistency.EVENTUAL, + config=None, + ): + return { + "rows": [ + { + "row": { + "id": "doc_id_001", + "vector": [0.23432432, 0.8923744, 0.89238432], + "text": "text", + "metadata": {"doc_id": "doc_id_001"}, + }, + "distance": 0.1, + "score": 0.5, + } + ], + "code": 0, + "msg": "Success", + } + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_baiduvectordb_mock(request, monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(MochowClient, "__init__", MockBaiduVectorDBClass.mock_vector_db_client) + monkeypatch.setattr(MochowClient, "list_databases", MockBaiduVectorDBClass.list_databases) + monkeypatch.setattr(MochowClient, "create_database", MockBaiduVectorDBClass.create_database) + monkeypatch.setattr(Database, "table", MockBaiduVectorDBClass.describe_table) + monkeypatch.setattr(Database, "list_table", MockBaiduVectorDBClass.list_table) + monkeypatch.setattr(Database, "create_table", MockBaiduVectorDBClass.create_table) + monkeypatch.setattr(Database, "drop_table", MockBaiduVectorDBClass.drop_table) + monkeypatch.setattr(Database, "describe_table", MockBaiduVectorDBClass.describe_table) + monkeypatch.setattr(Table, "rebuild_index", MockBaiduVectorDBClass.rebuild_index) + monkeypatch.setattr(Table, "describe_index", MockBaiduVectorDBClass.describe_index) + monkeypatch.setattr(Table, "delete", MockBaiduVectorDBClass.delete) + monkeypatch.setattr(Table, "search", MockBaiduVectorDBClass.search) + + yield + + if MOCK: + monkeypatch.undo() diff --git 
a/api/tests/integration_tests/vdb/__mock/vikingdb.py b/api/tests/integration_tests/vdb/__mock/vikingdb.py new file mode 100644 index 0000000000..0f40337feb --- /dev/null +++ b/api/tests/integration_tests/vdb/__mock/vikingdb.py @@ -0,0 +1,215 @@ +import os +from typing import Union +from unittest.mock import MagicMock + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from volcengine.viking_db import ( + Collection, + Data, + DistanceType, + Field, + FieldType, + Index, + IndexType, + QuantType, + VectorIndexParams, + VikingDBService, +) + +from core.rag.datasource.vdb.field import Field as vdb_Field + + +class MockVikingDBClass: + def __init__( + self, + host="api-vikingdb.volces.com", + region="cn-north-1", + ak="", + sk="", + scheme="http", + connection_timeout=30, + socket_timeout=30, + proxy=None, + ): + self._viking_db_service = MagicMock() + self._viking_db_service.get_exception = MagicMock(return_value='{"data": {"primary_key": "test_id"}}') + + def get_collection(self, collection_name) -> Collection: + return Collection( + collection_name=collection_name, + description="Collection For Dify", + viking_db_service=self._viking_db_service, + primary_key=vdb_Field.PRIMARY_KEY.value, + fields=[ + Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True), + Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String), + Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String), + Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text), + Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=768), + ], + indexes=[ + Index( + collection_name=collection_name, + index_name=f"{collection_name}_idx", + vector_index=VectorIndexParams( + distance=DistanceType.L2, + index_type=IndexType.HNSW, + quant=QuantType.Float, + ), + scalar_index=None, + stat=None, + viking_db_service=self._viking_db_service, + ) + ], + ) + + def drop_collection(self, collection_name): + assert collection_name != "" + + def create_collection(self, collection_name, fields, description="") -> Collection: + return Collection( + collection_name=collection_name, + description=description, + primary_key=vdb_Field.PRIMARY_KEY.value, + viking_db_service=self._viking_db_service, + fields=fields, + ) + + def get_index(self, collection_name, index_name) -> Index: + return Index( + collection_name=collection_name, + index_name=index_name, + viking_db_service=self._viking_db_service, + stat=None, + scalar_index=None, + vector_index=VectorIndexParams( + distance=DistanceType.L2, + index_type=IndexType.HNSW, + quant=QuantType.Float, + ), + ) + + def create_index( + self, + collection_name, + index_name, + vector_index=None, + cpu_quota=2, + description="", + partition_by="", + scalar_index=None, + shard_count=None, + shard_policy=None, + ): + return Index( + collection_name=collection_name, + index_name=index_name, + vector_index=vector_index, + cpu_quota=cpu_quota, + description=description, + partition_by=partition_by, + scalar_index=scalar_index, + shard_count=shard_count, + shard_policy=shard_policy, + viking_db_service=self._viking_db_service, + stat=None, + ) + + def drop_index(self, collection_name, index_name): + assert collection_name != "" + assert index_name != "" + + def upsert_data(self, data: Union[Data, list[Data]]): + assert data is not None + + def fetch_data(self, id: Union[str, list[str], int, list[int]]): + return Data( + fields={ + vdb_Field.GROUP_KEY.value: "test_group", + vdb_Field.METADATA_KEY.value: 
"{}", + vdb_Field.CONTENT_KEY.value: "content", + vdb_Field.PRIMARY_KEY.value: id, + vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + }, + id=id, + ) + + def delete_data(self, id: Union[str, list[str], int, list[int]]): + assert id is not None + + def search_by_vector( + self, + vector, + sparse_vectors=None, + filter=None, + limit=10, + output_fields=None, + partition="default", + dense_weight=None, + ) -> list[Data]: + return [ + Data( + fields={ + vdb_Field.GROUP_KEY.value: "test_group", + vdb_Field.METADATA_KEY.value: '\ + {"source": "/var/folders/ml/xxx/xxx.txt", \ + "document_id": "test_document_id", \ + "dataset_id": "test_dataset_id", \ + "doc_id": "test_id", \ + "doc_hash": "test_hash"}', + vdb_Field.CONTENT_KEY.value: "content", + vdb_Field.PRIMARY_KEY.value: "test_id", + vdb_Field.VECTOR.value: vector, + }, + id="test_id", + score=0.10, + ) + ] + + def search( + self, order=None, filter=None, limit=10, output_fields=None, partition="default", dense_weight=None + ) -> list[Data]: + return [ + Data( + fields={ + vdb_Field.GROUP_KEY.value: "test_group", + vdb_Field.METADATA_KEY.value: '\ + {"source": "/var/folders/ml/xxx/xxx.txt", \ + "document_id": "test_document_id", \ + "dataset_id": "test_dataset_id", \ + "doc_id": "test_id", \ + "doc_hash": "test_hash"}', + vdb_Field.CONTENT_KEY.value: "content", + vdb_Field.PRIMARY_KEY.value: "test_id", + vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + }, + id="test_id", + score=0.10, + ) + ] + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_vikingdb_mock(monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(VikingDBService, "__init__", MockVikingDBClass.__init__) + monkeypatch.setattr(VikingDBService, "get_collection", MockVikingDBClass.get_collection) + monkeypatch.setattr(VikingDBService, "create_collection", MockVikingDBClass.create_collection) + monkeypatch.setattr(VikingDBService, "drop_collection", MockVikingDBClass.drop_collection) + monkeypatch.setattr(VikingDBService, "get_index", MockVikingDBClass.get_index) + monkeypatch.setattr(VikingDBService, "create_index", MockVikingDBClass.create_index) + monkeypatch.setattr(VikingDBService, "drop_index", MockVikingDBClass.drop_index) + monkeypatch.setattr(Collection, "upsert_data", MockVikingDBClass.upsert_data) + monkeypatch.setattr(Collection, "fetch_data", MockVikingDBClass.fetch_data) + monkeypatch.setattr(Collection, "delete_data", MockVikingDBClass.delete_data) + monkeypatch.setattr(Index, "search_by_vector", MockVikingDBClass.search_by_vector) + monkeypatch.setattr(Index, "search", MockVikingDBClass.search) + + yield + + if MOCK: + monkeypatch.undo() diff --git a/api/tests/integration_tests/vdb/baidu/__init__.py b/api/tests/integration_tests/vdb/baidu/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/baidu/test_baidu.py b/api/tests/integration_tests/vdb/baidu/test_baidu.py new file mode 100644 index 0000000000..01a7f8853a --- /dev/null +++ b/api/tests/integration_tests/vdb/baidu/test_baidu.py @@ -0,0 +1,36 @@ +from unittest.mock import MagicMock + +from core.rag.datasource.vdb.baidu.baidu_vector import BaiduConfig, BaiduVector +from tests.integration_tests.vdb.__mock.baiduvectordb import setup_baiduvectordb_mock +from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis + +mock_client = MagicMock() 
+mock_client.list_databases.return_value = [{"name": "test"}] + + +class BaiduVectorTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = BaiduVector( + "dify", + BaiduConfig( + endpoint="http://127.0.0.1:5287", + account="root", + api_key="dify", + database="dify", + shard=1, + replicas=3, + ), + ) + + def search_by_vector(self): + hits_by_vector = self.vector.search_by_vector(query_vector=self.example_embedding) + assert len(hits_by_vector) == 1 + + def search_by_full_text(self): + hits_by_full_text = self.vector.search_by_full_text(query=get_example_text()) + assert len(hits_by_full_text) == 0 + + +def test_baidu_vector(setup_mock_redis, setup_baiduvectordb_mock): + BaiduVectorTest().run_all_tests() diff --git a/api/tests/integration_tests/vdb/vikingdb/__init__.py b/api/tests/integration_tests/vdb/vikingdb/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py b/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py new file mode 100644 index 0000000000..2572012ea0 --- /dev/null +++ b/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py @@ -0,0 +1,37 @@ +from core.rag.datasource.vdb.vikingdb.vikingdb_vector import VikingDBConfig, VikingDBVector +from tests.integration_tests.vdb.__mock.vikingdb import setup_vikingdb_mock +from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis + + +class VikingDBVectorTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = VikingDBVector( + "test_collection", + "test_group", + config=VikingDBConfig( + access_key="test_access_key", + host="test_host", + region="test_region", + scheme="test_scheme", + secret_key="test_secret_key", + connection_timeout=30, + socket_timeout=30, + ), + ) + + def search_by_vector(self): + hits_by_vector = self.vector.search_by_vector(query_vector=self.example_embedding) + assert len(hits_by_vector) == 1 + + def search_by_full_text(self): + hits_by_full_text = self.vector.search_by_full_text(query=get_example_text()) + assert len(hits_by_full_text) == 0 + + def get_ids_by_metadata_field(self): + ids = self.vector.get_ids_by_metadata_field(key="document_id", value="test_document_id") + assert len(ids) > 0 + + +def test_vikingdb_vector(setup_mock_redis, setup_vikingdb_mock): + VikingDBVectorTest().run_all_tests() diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh index bad809cbfd..6809ef7c6f 100755 --- a/dev/pytest/pytest_vdb.sh +++ b/dev/pytest/pytest_vdb.sh @@ -7,4 +7,5 @@ pytest api/tests/integration_tests/vdb/chroma \ api/tests/integration_tests/vdb/pgvector \ api/tests/integration_tests/vdb/qdrant \ api/tests/integration_tests/vdb/weaviate \ - api/tests/integration_tests/vdb/elasticsearch \ No newline at end of file + api/tests/integration_tests/vdb/elasticsearch \ + api/tests/integration_tests/vdb/vikingdb diff --git a/docker/.env.example b/docker/.env.example index eb05f7aa4f..969deadf67 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -91,6 +91,9 @@ MIGRATION_ENABLED=true # The default value is 300 seconds. FILES_ACCESS_TIMEOUT=300 +# Access token expiration time in minutes +ACCESS_TOKEN_EXPIRE_MINUTES=60 + # The maximum number of active requests for the application, where 0 means unlimited, should be a non-negative integer. APP_MAX_ACTIVE_REQUESTS=0 @@ -261,7 +264,7 @@ CONSOLE_CORS_ALLOW_ORIGINS=* # ------------------------------ # The type of storage to use for storing user files. 
-# Supported values are `local` and `s3` and `azure-blob` and `google-storage` and `tencent-cos` and `huawei-obs` +# Supported values are `local`, `s3`, `azure-blob`, `google-storage`, `tencent-cos`, `huawei-obs`, `volcengine-tos`, `baidu-obs`, `supabase` # Default: `local` STORAGE_TYPE=local @@ -341,6 +344,24 @@ VOLCENGINE_TOS_ENDPOINT=your-server-url # The region of the Volcengine TOS service. VOLCENGINE_TOS_REGION=your-region +# Baidu OBS Storage Configuration +# The name of the Baidu OBS bucket to use for storing files. +BAIDU_OBS_BUCKET_NAME=your-bucket-name +# The secret key to use for authenticating with the Baidu OBS service. +BAIDU_OBS_SECRET_KEY=your-secret-key +# The access key to use for authenticating with the Baidu OBS service. +BAIDU_OBS_ACCESS_KEY=your-access-key +# The endpoint of the Baidu OBS service. +BAIDU_OBS_ENDPOINT=your-server-url + +# Supabase Storage Configuration +# The name of the Supabase bucket to use for storing files. +SUPABASE_BUCKET_NAME=your-bucket-name +# The API key to use for authenticating with the Supabase service. +SUPABASE_API_KEY=your-access-key +# The project endpoint URL of the Supabase service. +SUPABASE_URL=your-server-url + # ------------------------------ # Vector Database Configuration # ------------------------------ @@ -462,6 +483,15 @@ ELASTICSEARCH_PORT=9200 ELASTICSEARCH_USERNAME=elastic ELASTICSEARCH_PASSWORD=elastic +# Baidu vector configurations, only available when VECTOR_STORE is `baidu` +BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287 +BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000 +BAIDU_VECTOR_DB_ACCOUNT=root +BAIDU_VECTOR_DB_API_KEY=dify +BAIDU_VECTOR_DB_DATABASE=dify +BAIDU_VECTOR_DB_SHARD=1 +BAIDU_VECTOR_DB_REPLICAS=3 + # ------------------------------ # Knowledge Configuration # ------------------------------ @@ -797,4 +827,6 @@ POSITION_TOOL_EXCLUDES= # Example: POSITION_PROVIDER_PINS=openai,openllm POSITION_PROVIDER_PINS= POSITION_PROVIDER_INCLUDES= -POSITION_PROVIDER_EXCLUDES= \ No newline at end of file +POSITION_PROVIDER_EXCLUDES= +# CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP +CSP_WHITELIST= \ No newline at end of file diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index c1a76d7f88..5db11d1961 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -47,6 +47,7 @@ x-shared-env: &shared-api-worker-env REDIS_SENTINEL_SERVICE_NAME: ${REDIS_SENTINEL_SERVICE_NAME:-} REDIS_SENTINEL_USERNAME: ${REDIS_SENTINEL_USERNAME:-} REDIS_SENTINEL_PASSWORD: ${REDIS_SENTINEL_PASSWORD:-} + ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60} REDIS_SENTINEL_SOCKET_TIMEOUT: ${REDIS_SENTINEL_SOCKET_TIMEOUT:-0.1} CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1} BROKER_USE_SSL: ${BROKER_USE_SSL:-false} @@ -165,6 +166,18 @@ x-shared-env: &shared-api-worker-env TENCENT_VECTOR_DB_DATABASE: ${TENCENT_VECTOR_DB_DATABASE:-dify} TENCENT_VECTOR_DB_SHARD: ${TENCENT_VECTOR_DB_SHARD:-1} TENCENT_VECTOR_DB_REPLICAS: ${TENCENT_VECTOR_DB_REPLICAS:-2} + BAIDU_VECTOR_DB_ENDPOINT: ${BAIDU_VECTOR_DB_ENDPOINT:-http://127.0.0.1:5287} + BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: ${BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS:-30000} + BAIDU_VECTOR_DB_ACCOUNT: ${BAIDU_VECTOR_DB_ACCOUNT:-root} + BAIDU_VECTOR_DB_API_KEY: ${BAIDU_VECTOR_DB_API_KEY:-dify} + BAIDU_VECTOR_DB_DATABASE: ${BAIDU_VECTOR_DB_DATABASE:-dify} + BAIDU_VECTOR_DB_SHARD: ${BAIDU_VECTOR_DB_SHARD:-1} + BAIDU_VECTOR_DB_REPLICAS: ${BAIDU_VECTOR_DB_REPLICAS:-3} + VIKINGDB_ACCESS_KEY: ${VIKINGDB_ACCESS_KEY:-dify} + 
VIKINGDB_SECRET_KEY: ${VIKINGDB_SECRET_KEY:-dify} + VIKINGDB_REGION: ${VIKINGDB_REGION:-cn-shanghai} + VIKINGDB_HOST: ${VIKINGDB_HOST:-api-vikingdb.xxx.volces.com} + VIKINGDB_SCHEMA: ${VIKINGDB_SCHEMA:-http} UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT:-15} UPLOAD_FILE_BATCH_LIMIT: ${UPLOAD_FILE_BATCH_LIMIT:-5} ETL_TYPE: ${ETL_TYPE:-dify} @@ -209,6 +222,7 @@ x-shared-env: &shared-api-worker-env SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128} HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} + APP_MAX_EXECUTION_TIME: ${APP_MAX_EXECUTION_TIME:-12000} services: # API service @@ -260,6 +274,7 @@ services: SENTRY_DSN: ${WEB_SENTRY_DSN:-} NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0} TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} + CSP_WHITELIST: ${CSP_WHITELIST:-} # The postgres database. db: @@ -279,7 +294,7 @@ services: volumes: - ./volumes/db/data:/var/lib/postgresql/data healthcheck: - test: [ "CMD", "pg_isready" ] + test: ['CMD', 'pg_isready'] interval: 1s timeout: 3s retries: 30 @@ -294,7 +309,7 @@ services: # Set the redis password when startup redis server. command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456} healthcheck: - test: [ "CMD", "redis-cli", "ping" ] + test: ['CMD', 'redis-cli', 'ping'] # The DifySandbox sandbox: @@ -314,7 +329,7 @@ services: volumes: - ./volumes/sandbox/dependencies:/dependencies healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:8194/health" ] + test: ['CMD', 'curl', '-f', 'http://localhost:8194/health'] networks: - ssrf_proxy_network @@ -327,7 +342,12 @@ services: volumes: - ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template - ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh - entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + entrypoint: + [ + 'sh', + '-c', + "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh", + ] environment: # pls clearly modify the squid env vars to fit your network environment. HTTP_PORT: ${SSRF_HTTP_PORT:-3128} @@ -356,8 +376,8 @@ services: - CERTBOT_EMAIL=${CERTBOT_EMAIL} - CERTBOT_DOMAIN=${CERTBOT_DOMAIN} - CERTBOT_OPTIONS=${CERTBOT_OPTIONS:-} - entrypoint: [ "/docker-entrypoint.sh" ] - command: [ "tail", "-f", "/dev/null" ] + entrypoint: ['/docker-entrypoint.sh'] + command: ['tail', '-f', '/dev/null'] # The nginx reverse proxy. # used for reverse proxying the API service and Web service. 
@@ -374,7 +394,12 @@ services: - ./volumes/certbot/conf/live:/etc/letsencrypt/live # cert dir (with certbot container) - ./volumes/certbot/conf:/etc/letsencrypt - ./volumes/certbot/www:/var/www/html - entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + entrypoint: + [ + 'sh', + '-c', + "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh", + ] environment: NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_} NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false} @@ -396,14 +421,14 @@ services: - api - web ports: - - "${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}" - - "${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}" + - '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}' + - '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}' # The Weaviate vector store. weaviate: image: semitechnologies/weaviate:1.19.0 profiles: - - "" + - '' - weaviate restart: always volumes: @@ -452,7 +477,7 @@ services: volumes: - ./volumes/pgvector/data:/var/lib/postgresql/data healthcheck: - test: [ "CMD", "pg_isready" ] + test: ['CMD', 'pg_isready'] interval: 1s timeout: 3s retries: 30 @@ -474,7 +499,7 @@ services: volumes: - ./volumes/pgvecto_rs/data:/var/lib/postgresql/data healthcheck: - test: [ "CMD", "pg_isready" ] + test: ['CMD', 'pg_isready'] interval: 1s timeout: 3s retries: 30 @@ -522,7 +547,7 @@ services: - ./volumes/milvus/etcd:/etcd command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd healthcheck: - test: [ "CMD", "etcdctl", "endpoint", "health" ] + test: ['CMD', 'etcdctl', 'endpoint', 'health'] interval: 30s timeout: 20s retries: 3 @@ -541,7 +566,7 @@ services: - ./volumes/milvus/minio:/minio_data command: minio server /minio_data --console-address ":9001" healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:9000/minio/health/live" ] + test: ['CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live'] interval: 30s timeout: 20s retries: 3 @@ -553,7 +578,7 @@ services: image: milvusdb/milvus:v2.3.1 profiles: - milvus - command: [ "milvus", "run", "standalone" ] + command: ['milvus', 'run', 'standalone'] environment: ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379} MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000} @@ -561,7 +586,7 @@ services: volumes: - ./volumes/milvus/milvus:/var/lib/milvus healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:9091/healthz" ] + test: ['CMD', 'curl', '-f', 'http://localhost:9091/healthz'] interval: 30s start_period: 90s timeout: 20s @@ -630,7 +655,7 @@ services: # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.15.1 + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.3 container_name: elasticsearch profiles: - elasticsearch @@ -643,13 +668,13 @@ services: node.name: dify-es0 discovery.type: single-node xpack.license.self_generated.type: trial - xpack.security.enabled: "true" - xpack.security.enrollment.enabled: "false" - xpack.security.http.ssl.enabled: "false" + xpack.security.enabled: 'true' + xpack.security.enrollment.enabled: 'false' + xpack.security.http.ssl.enabled: 'false' ports: - ${ELASTICSEARCH_PORT:-9200}:9200 healthcheck: - test: [ "CMD", "curl", "-s", 
"http://localhost:9200/_cluster/health?pretty" ] + test: ['CMD', 'curl', '-s', 'http://localhost:9200/_cluster/health?pretty'] interval: 30s timeout: 10s retries: 50 @@ -657,7 +682,7 @@ services: # https://www.elastic.co/guide/en/kibana/current/docker.html # https://www.elastic.co/guide/en/kibana/current/settings.html kibana: - image: docker.elastic.co/kibana/kibana:8.15.1 + image: docker.elastic.co/kibana/kibana:8.14.3 container_name: kibana profiles: - elasticsearch @@ -667,17 +692,17 @@ services: environment: XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: d1a66dfd-c4d3-4a0a-8290-2abcb83ab3aa NO_PROXY: localhost,127.0.0.1,elasticsearch,kibana - XPACK_SECURITY_ENABLED: "true" - XPACK_SECURITY_ENROLLMENT_ENABLED: "false" - XPACK_SECURITY_HTTP_SSL_ENABLED: "false" - XPACK_FLEET_ISAIRGAPPED: "true" + XPACK_SECURITY_ENABLED: 'true' + XPACK_SECURITY_ENROLLMENT_ENABLED: 'false' + XPACK_SECURITY_HTTP_SSL_ENABLED: 'false' + XPACK_FLEET_ISAIRGAPPED: 'true' I18N_LOCALE: zh-CN - SERVER_PORT: "5601" + SERVER_PORT: '5601' ELASTICSEARCH_HOSTS: http://elasticsearch:9200 ports: - ${KIBANA_PORT:-5601}:5601 healthcheck: - test: [ "CMD-SHELL", "curl -s http://localhost:5601 >/dev/null || exit 1" ] + test: ['CMD-SHELL', 'curl -s http://localhost:5601 >/dev/null || exit 1'] interval: 30s timeout: 10s retries: 3 diff --git a/web/.env.example b/web/.env.example index 8e254082b3..13ea01a2c7 100644 --- a/web/.env.example +++ b/web/.env.example @@ -22,3 +22,6 @@ NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON=false # The timeout for the text generation in millisecond NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS=60000 + +# CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP +NEXT_PUBLIC_CSP_WHITELIST= diff --git a/web/app/(shareLayout)/layout.tsx b/web/app/(shareLayout)/layout.tsx index 9c4632cd45..259af2bc2d 100644 --- a/web/app/(shareLayout)/layout.tsx +++ b/web/app/(shareLayout)/layout.tsx @@ -1,7 +1,12 @@ import React from 'react' import type { FC } from 'react' +import type { Metadata } from 'next' import GA, { GaType } from '@/app/components/base/ga' +export const metadata: Metadata = { + icons: 'data:,', // prevent browser from using default favicon +} + const Layout: FC<{ children: React.ReactNode }> = ({ children }) => { diff --git a/web/app/account/account-page/index.module.css b/web/app/account/account-page/index.module.css new file mode 100644 index 0000000000..949d1257e9 --- /dev/null +++ b/web/app/account/account-page/index.module.css @@ -0,0 +1,9 @@ +.modal { + padding: 24px 32px !important; + width: 400px !important; +} + +.bg { + background: linear-gradient(180deg, rgba(217, 45, 32, 0.05) 0%, rgba(217, 45, 32, 0.00) 24.02%), #F9FAFB; +} + diff --git a/web/app/account/account-page/index.tsx b/web/app/account/account-page/index.tsx new file mode 100644 index 0000000000..53f7692e6c --- /dev/null +++ b/web/app/account/account-page/index.tsx @@ -0,0 +1,304 @@ +'use client' +import { useState } from 'react' +import { useTranslation } from 'react-i18next' + +import { useContext } from 'use-context-selector' +import s from './index.module.css' +import Collapse from '@/app/components/header/account-setting/collapse' +import type { IItem } from '@/app/components/header/account-setting/collapse' +import Modal from '@/app/components/base/modal' +import Confirm from '@/app/components/base/confirm' +import Button from '@/app/components/base/button' +import { updateUserProfile } from '@/service/common' +import { useAppContext } from '@/context/app-context' +import { ToastContext } from '@/app/components/base/toast' +import AppIcon 
from '@/app/components/base/app-icon' +import Avatar from '@/app/components/base/avatar' +import { IS_CE_EDITION } from '@/config' + +const titleClassName = ` + text-sm font-medium text-gray-900 +` +const descriptionClassName = ` + mt-1 text-xs font-normal text-gray-500 +` +const inputClassName = ` + mt-2 w-full px-3 py-2 bg-gray-100 rounded + text-sm font-normal text-gray-800 +` + +const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/ + +export default function AccountPage() { + const { t } = useTranslation() + const { mutateUserProfile, userProfile, apps } = useAppContext() + const { notify } = useContext(ToastContext) + const [editNameModalVisible, setEditNameModalVisible] = useState(false) + const [editName, setEditName] = useState('') + const [editing, setEditing] = useState(false) + const [editPasswordModalVisible, setEditPasswordModalVisible] = useState(false) + const [currentPassword, setCurrentPassword] = useState('') + const [password, setPassword] = useState('') + const [confirmPassword, setConfirmPassword] = useState('') + const [showDeleteAccountModal, setShowDeleteAccountModal] = useState(false) + + const handleEditName = () => { + setEditNameModalVisible(true) + setEditName(userProfile.name) + } + const handleSaveName = async () => { + try { + setEditing(true) + await updateUserProfile({ url: 'account/name', body: { name: editName } }) + notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) + mutateUserProfile() + setEditNameModalVisible(false) + setEditing(false) + } + catch (e) { + notify({ type: 'error', message: (e as Error).message }) + setEditNameModalVisible(false) + setEditing(false) + } + } + + const showErrorMessage = (message: string) => { + notify({ + type: 'error', + message, + }) + } + const valid = () => { + if (!password.trim()) { + showErrorMessage(t('login.error.passwordEmpty')) + return false + } + if (!validPassword.test(password)) { + showErrorMessage(t('login.error.passwordInvalid')) + return false + } + if (password !== confirmPassword) { + showErrorMessage(t('common.account.notEqual')) + return false + } + + return true + } + const resetPasswordForm = () => { + setCurrentPassword('') + setPassword('') + setConfirmPassword('') + } + const handleSavePassword = async () => { + if (!valid()) + return + try { + setEditing(true) + await updateUserProfile({ + url: 'account/password', + body: { + password: currentPassword, + new_password: password, + repeat_new_password: confirmPassword, + }, + }) + notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) + mutateUserProfile() + setEditPasswordModalVisible(false) + resetPasswordForm() + setEditing(false) + } + catch (e) { + notify({ type: 'error', message: (e as Error).message }) + setEditPasswordModalVisible(false) + setEditing(false) + } + } + + const renderAppItem = (item: IItem) => { + return ( +
+
+ +
+
{item.name}
+
+ ) + } + + return ( + <> +
+

{t('common.account.myAccount')}

+
+
+ +
+

{userProfile.name}

+

{userProfile.email}

+
+
+
+
{t('common.account.name')}
+
+
+ {userProfile.name} +
+
+ {t('common.operation.edit')} +
+
+
+
+
{t('common.account.email')}
+
+
+ {userProfile.email} +
+
+
+ { + IS_CE_EDITION && ( +
+
+
{t('common.account.password')}
+
{t('common.account.passwordTip')}
+
+ +
+ ) + } +
+
+
{t('common.account.langGeniusAccount')}
+
{t('common.account.langGeniusAccountTip')}
+ {!!apps.length && ( + ({ key: app.id, name: app.name }))} + renderItem={renderAppItem} + wrapperClassName='mt-2' + /> + )} + {!IS_CE_EDITION && } +
+ { + editNameModalVisible && ( + setEditNameModalVisible(false)} + className={s.modal} + > +
{t('common.account.editName')}
+
{t('common.account.name')}
+ setEditName(e.target.value)} + /> +
+ + +
+
+ ) + } + { + editPasswordModalVisible && ( + { + setEditPasswordModalVisible(false) + resetPasswordForm() + }} + className={s.modal} + > +
{userProfile.is_password_set ? t('common.account.resetPassword') : t('common.account.setPassword')}
+ {userProfile.is_password_set && ( + <> +
{t('common.account.currentPassword')}
+ setCurrentPassword(e.target.value)} + /> + + )} +
+ {userProfile.is_password_set ? t('common.account.newPassword') : t('common.account.password')} +
+ setPassword(e.target.value)} + /> +
{t('common.account.confirmPassword')}
+ setConfirmPassword(e.target.value)} + /> +
+ + +
+
+ ) + } + { + showDeleteAccountModal && ( + setShowDeleteAccountModal(false)} + onConfirm={() => setShowDeleteAccountModal(false)} + showCancel={false} + type='warning' + title={t('common.account.delete')} + content={ + <> +
+ {t('common.account.deleteTip')} +
+ +
{`${t('common.account.delete')}: ${userProfile.email}`}
+ + } + confirmText={t('common.operation.ok') as string} + /> + ) + } + + ) +} diff --git a/web/app/account/avatar.tsx b/web/app/account/avatar.tsx new file mode 100644 index 0000000000..29bd0cb5a5 --- /dev/null +++ b/web/app/account/avatar.tsx @@ -0,0 +1,94 @@ +'use client' +import { useTranslation } from 'react-i18next' +import { Fragment } from 'react' +import { useRouter } from 'next/navigation' +import { Menu, Transition } from '@headlessui/react' +import Avatar from '@/app/components/base/avatar' +import { logout } from '@/service/common' +import { useAppContext } from '@/context/app-context' +import { LogOut01 } from '@/app/components/base/icons/src/vender/line/general' + +export type IAppSelector = { + isMobile: boolean +} + +export default function AppSelector() { + const router = useRouter() + const { t } = useTranslation() + const { userProfile } = useAppContext() + + const handleLogout = async () => { + await logout({ + url: '/logout', + params: {}, + }) + + if (localStorage?.getItem('console_token')) + localStorage.removeItem('console_token') + + router.push('/signin') + } + + return ( + + { + ({ open }) => ( + <> +
+ + + +
+ + + +
+
+
+
{userProfile.name}
+
{userProfile.email}
+
+ +
+
+
+ +
handleLogout()}> +
+ +
{t('common.userProfile.logout')}
+
+
+
+
+
+ + ) + } +
+ ) +} diff --git a/web/app/account/header.tsx b/web/app/account/header.tsx new file mode 100644 index 0000000000..694533e5ab --- /dev/null +++ b/web/app/account/header.tsx @@ -0,0 +1,37 @@ +'use client' +import { useTranslation } from 'react-i18next' +import { RiArrowRightUpLine, RiRobot2Line } from '@remixicon/react' +import { useRouter } from 'next/navigation' +import Button from '../components/base/button' +import Avatar from './avatar' +import LogoSite from '@/app/components/base/logo/logo-site' + +const Header = () => { + const { t } = useTranslation() + const router = useRouter() + + const back = () => { + router.back() + } + return ( +
+
+
+ +
+
+

{t('common.account.account')}

+
+
+ +
+ +
+
+ ) +} +export default Header diff --git a/web/app/account/layout.tsx b/web/app/account/layout.tsx new file mode 100644 index 0000000000..5aa8b05cbf --- /dev/null +++ b/web/app/account/layout.tsx @@ -0,0 +1,40 @@ +import React from 'react' +import type { ReactNode } from 'react' +import Header from './header' +import SwrInitor from '@/app/components/swr-initor' +import { AppContextProvider } from '@/context/app-context' +import GA, { GaType } from '@/app/components/base/ga' +import HeaderWrapper from '@/app/components/header/header-wrapper' +import { EventEmitterContextProvider } from '@/context/event-emitter' +import { ProviderContextProvider } from '@/context/provider-context' +import { ModalContextProvider } from '@/context/modal-context' + +const Layout = ({ children }: { children: ReactNode }) => { + return ( + <> + + + + + + + +
+ +
+ {children} +
+ + + + + + + ) +} + +export const metadata = { + title: 'Dify', +} + +export default Layout diff --git a/web/app/account/page.tsx b/web/app/account/page.tsx new file mode 100644 index 0000000000..bb7e7f7feb --- /dev/null +++ b/web/app/account/page.tsx @@ -0,0 +1,7 @@ +import AccountPage from './account-page' + +export default function Account() { + return
+ +
+} diff --git a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx index 7f83a14d58..f556121518 100644 --- a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx +++ b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx @@ -1,6 +1,6 @@ 'use client' -import { memo, useEffect, useMemo } from 'react' +import { memo, useCallback, useEffect, useMemo } from 'react' import type { FC } from 'react' import { useTranslation } from 'react-i18next' import WeightedScore from './weighted-score' @@ -11,7 +11,7 @@ import type { DatasetConfigs, } from '@/models/debug' import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector' -import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' +import { useCurrentProviderAndModel, useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' import type { ModelConfig } from '@/app/components/workflow/types' import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal' import Tooltip from '@/app/components/base/tooltip' @@ -23,6 +23,7 @@ import { RerankingModeEnum } from '@/models/datasets' import cn from '@/utils/classnames' import { useSelectedDatasetsMode } from '@/app/components/workflow/nodes/knowledge-retrieval/hooks' import Switch from '@/app/components/base/switch' +import Toast from '@/app/components/base/toast' type Props = { datasetConfigs: DatasetConfigs @@ -60,6 +61,24 @@ const ConfigContent: FC = ({ modelList: rerankModelList, defaultModel: rerankDefaultModel, } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank) + + const { + currentModel, + } = useCurrentProviderAndModel( + rerankModelList, + rerankDefaultModel + ? { + ...rerankDefaultModel, + provider: rerankDefaultModel.provider.provider, + } + : undefined, + ) + + const handleDisabledSwitchClick = useCallback(() => { + if (!currentModel) + Toast.notify({ type: 'error', message: t('workflow.errorMsg.rerankModelRequired') }) + }, [currentModel, rerankDefaultModel, t]) + const rerankModel = (() => { if (datasetConfigs.reranking_model?.reranking_provider_name) { return { @@ -231,16 +250,22 @@ const ConfigContent: FC = ({
{ selectedDatasetsMode.allEconomic && ( - { - onChange({ - ...datasetConfigs, - reranking_enable: v, - }) - }} - /> +
+ { + onChange({ + ...datasetConfigs, + reranking_enable: v, + }) + }} + /> +
) }
{t('common.modelProvider.rerankModel.key')}
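The switch changes in config-content.tsx above (and mirrored in retrieval-param-config/index.tsx further down) follow one pattern: a disabled Switch swallows pointer events, so the toast explaining why toggling is blocked has to be raised from a wrapping div. A minimal sketch of that pattern under the same assumption; GuardedRerankSwitch and the hard-coded message are illustrative stand-ins, while the import paths and the Toast.notify call mirror the diff:

```tsx
// Minimal sketch, not the patch's exact code: the wrapper div receives the
// click because the disabled control itself will not.
import { useCallback } from 'react'
import Switch from '@/app/components/base/switch'
import Toast from '@/app/components/base/toast'

type Props = {
  enabled: boolean
  hasRerankModel: boolean // e.g. a rerank model is configured
  onChange: (enabled: boolean) => void
}

const GuardedRerankSwitch = ({ enabled, hasRerankModel, onChange }: Props) => {
  const handleWrapperClick = useCallback(() => {
    if (!hasRerankModel)
      Toast.notify({ type: 'error', message: 'Rerank model is required' })
  }, [hasRerankModel])

  // the onClick lives on the wrapper so it still fires while the Switch is disabled
  return (
    <div onClick={handleWrapperClick}>
      <Switch
        size='md'
        defaultValue={enabled}
        disabled={!hasRerankModel}
        onChange={onChange}
      />
    </div>
  )
}

export default GuardedRerankSwitch
```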
diff --git a/web/app/components/app/log/list.tsx b/web/app/components/app/log/list.tsx index cfc5452c08..6d643a01a3 100644 --- a/web/app/components/app/log/list.tsx +++ b/web/app/components/app/log/list.tsx @@ -301,10 +301,14 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) { })) }, [items]) + const fetchInitiated = useRef(false) + useEffect(() => { - if (appDetail?.id && detail.id && appDetail?.mode !== 'completion') + if (appDetail?.id && detail.id && appDetail?.mode !== 'completion' && !fetchInitiated.current) { + fetchInitiated.current = true fetchData() - }, [appDetail?.id, detail.id, appDetail?.mode]) + } + }, [appDetail?.id, detail.id, appDetail?.mode, fetchData]) const isChatMode = appDetail?.mode !== 'completion' const isAdvanced = appDetail?.mode === 'advanced-chat' diff --git a/web/app/components/base/chat/chat/citation/popup.tsx b/web/app/components/base/chat/chat/citation/popup.tsx index b61bf623fe..9b98329e70 100644 --- a/web/app/components/base/chat/chat/citation/popup.tsx +++ b/web/app/components/base/chat/chat/citation/popup.tsx @@ -100,7 +100,7 @@ const Popup: FC = ({ /> } /> { diff --git a/web/app/components/base/ga/index.tsx b/web/app/components/base/ga/index.tsx index ec0089ff70..219724113f 100644 --- a/web/app/components/base/ga/index.tsx +++ b/web/app/components/base/ga/index.tsx @@ -1,6 +1,7 @@ import type { FC } from 'react' import React from 'react' import Script from 'next/script' +import { headers } from 'next/headers' import { IS_CE_EDITION } from '@/config' export enum GaType { @@ -23,9 +24,16 @@ const GA: FC = ({ if (IS_CE_EDITION) return null + const nonce = process.env.NODE_ENV === 'production' ? headers().get('x-nonce') : '' + return ( <> - + diff --git a/web/app/components/base/markdown.tsx b/web/app/components/base/markdown.tsx index 39a399cc9f..dbe4087882 100644 --- a/web/app/components/base/markdown.tsx +++ b/web/app/components/base/markdown.tsx @@ -245,7 +245,7 @@ export function Markdown(props: { content: string; className?: string }) { return (
string) manualClose?: boolean + disabled?: boolean } const timeoutDuration = 100 @@ -30,6 +31,7 @@ export default function CustomPopover({ className, btnClassName, manualClose, + disabled = false, }: IPopover) { const buttonRef = useRef(null) const timeOutRef = useRef(null) @@ -60,6 +62,7 @@ export default function CustomPopover({ > { - return ( - <> - - ) -} - -export default Topbar diff --git a/web/app/components/datasets/common/retrieval-param-config/index.tsx b/web/app/components/datasets/common/retrieval-param-config/index.tsx index 323e47f3b4..9d48d56a8d 100644 --- a/web/app/components/datasets/common/retrieval-param-config/index.tsx +++ b/web/app/components/datasets/common/retrieval-param-config/index.tsx @@ -1,6 +1,6 @@ 'use client' import type { FC } from 'react' -import React from 'react' +import React, { useCallback } from 'react' import { useTranslation } from 'react-i18next' import cn from '@/utils/classnames' @@ -11,7 +11,7 @@ import Switch from '@/app/components/base/switch' import Tooltip from '@/app/components/base/tooltip' import type { RetrievalConfig } from '@/types/app' import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector' -import { useModelListAndDefaultModel } from '@/app/components/header/account-setting/model-provider-page/hooks' +import { useCurrentProviderAndModel, useModelListAndDefaultModel } from '@/app/components/header/account-setting/model-provider-page/hooks' import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' import { DEFAULT_WEIGHTED_SCORE, @@ -19,6 +19,7 @@ import { WeightedScoreEnum, } from '@/models/datasets' import WeightedScore from '@/app/components/app/configuration/dataset-config/params-config/weighted-score' +import Toast from '@/app/components/base/toast' type Props = { type: RETRIEVE_METHOD @@ -38,6 +39,24 @@ const RetrievalParamConfig: FC = ({ defaultModel: rerankDefaultModel, modelList: rerankModelList, } = useModelListAndDefaultModel(ModelTypeEnum.rerank) + + const { + currentModel, + } = useCurrentProviderAndModel( + rerankModelList, + rerankDefaultModel + ? { + ...rerankDefaultModel, + provider: rerankDefaultModel.provider.provider, + } + : undefined, + ) + + const handleDisabledSwitchClick = useCallback(() => { + if (!currentModel) + Toast.notify({ type: 'error', message: t('workflow.errorMsg.rerankModelRequired') }) + }, [currentModel, rerankDefaultModel, t]) + const isHybridSearch = type === RETRIEVE_METHOD.hybrid const rerankModel = (() => { @@ -99,16 +118,22 @@ const RetrievalParamConfig: FC = ({
{canToggleRerankModalEnable && ( - { - onChange({ - ...value, - reranking_enable: v, - }) - }} - /> +
+ { + onChange({ + ...value, + reranking_enable: v, + }) + }} + disabled={!currentModel} + /> +
)}
{t('common.modelProvider.rerankModel.key')} diff --git a/web/app/components/datasets/create/step-two/index.tsx b/web/app/components/datasets/create/step-two/index.tsx index 765f0622dd..718c412bd6 100644 --- a/web/app/components/datasets/create/step-two/index.tsx +++ b/web/app/components/datasets/create/step-two/index.tsx @@ -133,6 +133,7 @@ const StepTwo = ({ ? IndexingType.QUALIFIED : IndexingType.ECONOMICAL, ) + const [isLanguageSelectDisabled, setIsLanguageSelectDisabled] = useState(false) const [docForm, setDocForm] = useState( (datasetId && documentDetail) ? documentDetail.doc_form : DocForm.TEXT, ) @@ -201,9 +202,9 @@ const StepTwo = ({ } } - const fetchFileIndexingEstimate = async (docForm = DocForm.TEXT) => { + const fetchFileIndexingEstimate = async (docForm = DocForm.TEXT, language?: string) => { // eslint-disable-next-line @typescript-eslint/no-use-before-define - const res = await didFetchFileIndexingEstimate(getFileIndexingEstimateParams(docForm)!) + const res = await didFetchFileIndexingEstimate(getFileIndexingEstimateParams(docForm, language)!) if (segmentationType === SegmentType.CUSTOM) setCustomFileIndexingEstimate(res) else @@ -271,7 +272,7 @@ const StepTwo = ({ } } - const getFileIndexingEstimateParams = (docForm: DocForm): IndexingEstimateParams | undefined => { + const getFileIndexingEstimateParams = (docForm: DocForm, language?: string): IndexingEstimateParams | undefined => { if (dataSourceType === DataSourceType.FILE) { return { info_list: { @@ -283,7 +284,7 @@ const StepTwo = ({ indexing_technique: getIndexing_technique() as string, process_rule: getProcessRule(), doc_form: docForm, - doc_language: docLanguage, + doc_language: language || docLanguage, dataset_id: datasetId as string, } } @@ -296,7 +297,7 @@ const StepTwo = ({ indexing_technique: getIndexing_technique() as string, process_rule: getProcessRule(), doc_form: docForm, - doc_language: docLanguage, + doc_language: language || docLanguage, dataset_id: datasetId as string, } } @@ -309,7 +310,7 @@ const StepTwo = ({ indexing_technique: getIndexing_technique() as string, process_rule: getProcessRule(), doc_form: docForm, - doc_language: docLanguage, + doc_language: language || docLanguage, dataset_id: datasetId as string, } } @@ -484,8 +485,26 @@ const StepTwo = ({ setDocForm(DocForm.TEXT) } + const previewSwitch = async (language?: string) => { + setPreviewSwitched(true) + setIsLanguageSelectDisabled(true) + if (segmentationType === SegmentType.AUTO) + setAutomaticFileIndexingEstimate(null) + else + setCustomFileIndexingEstimate(null) + try { + await fetchFileIndexingEstimate(DocForm.QA, language) + } + finally { + setIsLanguageSelectDisabled(false) + } + } + const handleSelect = (language: string) => { setDocLanguage(language) + // Switch language, re-cutter + if (docForm === DocForm.QA && previewSwitched) + previewSwitch(language) } const changeToEconomicalType = () => { @@ -495,15 +514,6 @@ const StepTwo = ({ } } - const previewSwitch = async () => { - setPreviewSwitched(true) - if (segmentationType === SegmentType.AUTO) - setAutomaticFileIndexingEstimate(null) - else - setCustomFileIndexingEstimate(null) - await fetchFileIndexingEstimate(DocForm.QA) - } - useEffect(() => { // fetch rules if (!isSetting) { @@ -576,7 +586,7 @@ const StepTwo = ({
{t('datasetCreation.steps.two')} - {isMobile && ( + {(isMobile || !showPreview) && (
{t('datasetCreation.stepTwo.QALanguage')} - +
@@ -942,7 +952,7 @@ const StepTwo = ({
{t('datasetCreation.stepTwo.previewTitle')}
{docForm === DocForm.QA && !previewSwitched && ( - + )}
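The step-two changes above boil down to two cooperating handlers: previewSwitch now accepts an optional language override and locks the language selector while the QA estimate is re-fetched, and handleSelect re-runs the preview once the user has already switched to QA mode. A condensed sketch of that flow; the state setters, DocForm, and fetchFileIndexingEstimate come from the component, and clearCurrentEstimate is a hypothetical shorthand for the segmentation-type-dependent reset:

```ts
const previewSwitch = async (language?: string) => {
  setPreviewSwitched(true)
  setIsLanguageSelectDisabled(true) // lock the selector while fetching
  clearCurrentEstimate() // drop the stale estimate before re-fetching
  try {
    // `language` overrides the current docLanguage for this request only
    await fetchFileIndexingEstimate(DocForm.QA, language)
  }
  finally {
    setIsLanguageSelectDisabled(false)
  }
}

const handleSelect = (language: string) => {
  setDocLanguage(language)
  // after the user has switched to the QA preview, a language change must
  // re-run the estimate so the preview matches the newly selected language
  if (docForm === DocForm.QA && previewSwitched)
    previewSwitch(language)
}
```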
diff --git a/web/app/components/datasets/create/step-two/language-select/index.tsx b/web/app/components/datasets/create/step-two/language-select/index.tsx index f8709c89f3..fab2bb1c71 100644 --- a/web/app/components/datasets/create/step-two/language-select/index.tsx +++ b/web/app/components/datasets/create/step-two/language-select/index.tsx @@ -9,16 +9,19 @@ import { languages } from '@/i18n/language' export type ILanguageSelectProps = { currentLanguage: string onSelect: (language: string) => void + disabled?: boolean } const LanguageSelect: FC = ({ currentLanguage, onSelect, + disabled, }) => { return ( {languages.filter(language => language.supported).map(({ prompt_name, name }) => ( diff --git a/web/app/components/datasets/documents/list.tsx b/web/app/components/datasets/documents/list.tsx index 540474e7a5..0e0eebb034 100644 --- a/web/app/components/datasets/documents/list.tsx +++ b/web/app/components/datasets/documents/list.tsx @@ -122,6 +122,7 @@ export const OperationAction: FC<{ }> = ({ embeddingAvailable, datasetId, detail, onUpdate, scene = 'list', className = '' }) => { const { id, enabled = false, archived = false, data_source_type } = detail || {} const [showModal, setShowModal] = useState(false) + const [deleting, setDeleting] = useState(false) const { notify } = useContext(ToastContext) const { t } = useTranslation() const router = useRouter() @@ -153,6 +154,7 @@ export const OperationAction: FC<{ break default: opApi = deleteDocument + setDeleting(true) break } const [e] = await asyncRunSafe(opApi({ datasetId, documentId: id }) as Promise) @@ -160,6 +162,8 @@ export const OperationAction: FC<{ notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) else notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') }) + if (operationName === 'delete') + setDeleting(false) onUpdate(operationName) } @@ -295,6 +299,8 @@ export const OperationAction: FC<{ {showModal && {
-
setShowAccountSettingModal({ payload: 'account' })}> + +
{t('common.account.account')}
+ + + + +
setShowAccountSettingModal({ payload: 'members' })}>
{t('common.userProfile.settings')}
diff --git a/web/app/components/header/account-setting/index.tsx b/web/app/components/header/account-setting/index.tsx index 253b9f1b4c..d829f6b77b 100644 --- a/web/app/components/header/account-setting/index.tsx +++ b/web/app/components/header/account-setting/index.tsx @@ -2,10 +2,6 @@ import { useTranslation } from 'react-i18next' import { useEffect, useRef, useState } from 'react' import { - RiAccountCircleFill, - RiAccountCircleLine, - RiApps2AddFill, - RiApps2AddLine, RiBox3Fill, RiBox3Line, RiCloseLine, @@ -21,9 +17,7 @@ import { RiPuzzle2Line, RiTranslate2, } from '@remixicon/react' -import AccountPage from './account-page' import MembersPage from './members-page' -import IntegrationsPage from './Integrations-page' import LanguagePage from './language-page' import ApiBasedExtensionPage from './api-based-extension-page' import DataSourcePage from './data-source-page' @@ -60,7 +54,7 @@ type GroupItem = { export default function AccountSetting({ onCancel, - activeTab = 'account', + activeTab = 'members', }: IAccountSettingProps) { const [activeMenu, setActiveMenu] = useState(activeTab) const { t } = useTranslation() @@ -125,18 +119,6 @@ export default function AccountSetting({ key: 'account-group', name: t('common.settings.accountGroup'), items: [ - { - key: 'account', - name: t('common.settings.account'), - icon: , - activeIcon: , - }, - { - key: 'integrations', - name: t('common.settings.integrations'), - icon: , - activeIcon: , - }, { key: 'language', name: t('common.settings.language'), @@ -217,10 +199,8 @@ export default function AccountSetting({
- {activeMenu === 'account' && } {activeMenu === 'members' && } {activeMenu === 'billing' && } - {activeMenu === 'integrations' && } {activeMenu === 'language' && } {activeMenu === 'provider' && } {activeMenu === 'data-source' && } diff --git a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx index a22ec16c25..a16b101e6a 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx @@ -26,7 +26,7 @@ const ModelIcon: FC = ({ return ( model-icon ) diff --git a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx index c618b6f1a9..768f2c2766 100644 --- a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx @@ -16,7 +16,7 @@ const ProviderIcon: FC = ({ return ( provider-icon ) diff --git a/web/app/components/header/header-wrapper.tsx b/web/app/components/header/header-wrapper.tsx index 205a379a90..360cf8e560 100644 --- a/web/app/components/header/header-wrapper.tsx +++ b/web/app/components/header/header-wrapper.tsx @@ -11,7 +11,7 @@ const HeaderWrapper = ({ children, }: HeaderWrapperProps) => { const pathname = usePathname() - const isBordered = ['/apps', '/datasets', '/datasets/create', '/tools'].includes(pathname) + const isBordered = ['/apps', '/datasets', '/datasets/create', '/tools', '/account'].includes(pathname) return (
{ const router = useRouter() const searchParams = useSearchParams() - const consoleToken = searchParams.get('console_token') + const consoleToken = searchParams.get('access_token') + const refreshToken = searchParams.get('refresh_token') const consoleTokenFromLocalStorage = localStorage?.getItem('console_token') + const refreshTokenFromLocalStorage = localStorage?.getItem('refresh_token') const [init, setInit] = useState(false) + const { getNewAccessToken } = useRefreshToken() useEffect(() => { - if (!(consoleToken || consoleTokenFromLocalStorage)) + if (!(consoleToken || refreshToken || consoleTokenFromLocalStorage || refreshTokenFromLocalStorage)) { router.replace('/signin') - - if (consoleToken) { - localStorage?.setItem('console_token', consoleToken!) - router.replace('/apps', { forceOptimisticNavigation: false } as any) + return } + if (consoleTokenFromLocalStorage && refreshTokenFromLocalStorage) + getNewAccessToken(consoleTokenFromLocalStorage, refreshTokenFromLocalStorage) + + if (consoleToken && refreshToken) { + localStorage.setItem('console_token', consoleToken) + localStorage.setItem('refresh_token', refreshToken) + getNewAccessToken(consoleToken, refreshToken).then(() => { + router.replace('/apps', { forceOptimisticNavigation: false } as any) + }).catch(() => { + router.replace('/signin') + }) + } + setInit(true) }, []) diff --git a/web/app/components/workflow/hooks/use-workflow-start-run.tsx b/web/app/components/workflow/hooks/use-workflow-start-run.tsx index b2b1c69975..77e959b573 100644 --- a/web/app/components/workflow/hooks/use-workflow-start-run.tsx +++ b/web/app/components/workflow/hooks/use-workflow-start-run.tsx @@ -1,17 +1,25 @@ import { useCallback } from 'react' import { useStoreApi } from 'reactflow' +import { useTranslation } from 'react-i18next' import { useWorkflowStore } from '../store' import { BlockEnum, WorkflowRunningStatus, } from '../types' +import type { KnowledgeRetrievalNodeType } from '../nodes/knowledge-retrieval/types' +import type { Node } from '../types' +import { useWorkflow } from './use-workflow' import { useIsChatMode, useNodesSyncDraft, useWorkflowInteractions, useWorkflowRun, } from './index' +import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' +import { useCurrentProviderAndModel, useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' import { useFeaturesStore } from '@/app/components/base/features/hooks' +import KnowledgeRetrievalDefault from '@/app/components/workflow/nodes/knowledge-retrieval/default' +import Toast from '@/app/components/base/toast' export const useWorkflowStartRun = () => { const store = useStoreApi() @@ -20,7 +28,26 @@ export const useWorkflowStartRun = () => { const isChatMode = useIsChatMode() const { handleCancelDebugAndPreviewPanel } = useWorkflowInteractions() const { handleRun } = useWorkflowRun() + const { isFromStartNode } = useWorkflow() const { doSyncWorkflowDraft } = useNodesSyncDraft() + const { checkValid: checkKnowledgeRetrievalValid } = KnowledgeRetrievalDefault + const { t } = useTranslation() + const { + modelList: rerankModelList, + defaultModel: rerankDefaultModel, + } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank) + + const { + currentModel, + } = useCurrentProviderAndModel( + rerankModelList, + rerankDefaultModel + ? 
{ + ...rerankDefaultModel, + provider: rerankDefaultModel.provider.provider, + } + : undefined, + ) const handleWorkflowStartRunInWorkflow = useCallback(async () => { const { @@ -33,6 +60,9 @@ export const useWorkflowStartRun = () => { const { getNodes } = store.getState() const nodes = getNodes() const startNode = nodes.find(node => node.data.type === BlockEnum.Start) + const knowledgeRetrievalNodes = nodes.filter((node: Node) => + node.data.type === BlockEnum.KnowledgeRetrieval, + ) const startVariables = startNode?.data.variables || [] const fileSettings = featuresStore!.getState().features.file const { @@ -42,6 +72,31 @@ setShowEnvPanel, } = workflowStore.getState() + if (knowledgeRetrievalNodes.length > 0) { + for (const node of knowledgeRetrievalNodes) { + if (isFromStartNode(node.id)) { + const res = checkKnowledgeRetrievalValid(node.data, t) + if (!res.isValid || !currentModel || !rerankDefaultModel) { + const errorMessage = res.errorMessage + if (errorMessage) { + Toast.notify({ + type: 'error', + message: errorMessage, + }) + return false + } + else { + Toast.notify({ + type: 'error', + message: t('appDebug.datasetConfig.rerankModelRequired'), + }) + return false + } + } + } + } + } + setShowEnvPanel(false) if (showDebugAndPreviewPanel) { diff --git a/web/app/components/workflow/hooks/use-workflow.ts b/web/app/components/workflow/hooks/use-workflow.ts index 0e51ea0b37..3c27b5c91b 100644 --- a/web/app/components/workflow/hooks/use-workflow.ts +++ b/web/app/components/workflow/hooks/use-workflow.ts @@ -235,6 +235,33 @@ export const useWorkflow = () => { return nodes.filter(node => node.parentId === nodeId) }, [store]) + const isFromStartNode = useCallback((nodeId: string) => { + const { getNodes } = store.getState() + const nodes = getNodes() + const currentNode = nodes.find(node => node.id === nodeId) + + if (!currentNode) + return false + + if (currentNode.data.type === BlockEnum.Start) + return true + + const checkPreviousNodes = (node: Node) => { + const previousNodes = getBeforeNodeById(node.id) + + for (const prevNode of previousNodes) { + if (prevNode.data.type === BlockEnum.Start) + return true + if (checkPreviousNodes(prevNode)) + return true + } + + return false + } + + return checkPreviousNodes(currentNode) + }, [store, getBeforeNodeById]) + const handleOutVarRenameChange = useCallback((nodeId: string, oldValeSelector: ValueSelector, newVarSelector: ValueSelector) => { const { getNodes, setNodes } = store.getState() const afterNodes = getAfterNodesInSameBranch(nodeId) @@ -389,6 +416,7 @@ export const useWorkflow = () => { checkParallelLimit, checkNestedParallelLimit, isValidConnection, + isFromStartNode, formatTimeFromNow, getNode, getBeforeNodeById, diff --git a/web/app/layout.tsx b/web/app/layout.tsx index e9242edfad..48e35c50e0 100644 --- a/web/app/layout.tsx +++ b/web/app/layout.tsx @@ -2,7 +2,6 @@ import type { Viewport } from 'next' import I18nServer from './components/i18n-server' import BrowserInitor from './components/browser-initor' import SentryInitor from './components/sentry-initor' -import Topbar from './components/base/topbar' import { getLocaleOnServer } from '@/i18n/server' import './styles/globals.css' import './styles/markdown.scss' @@ -45,7 +44,6 @@ const LocaleLayout = ({ data-public-site-about={process.env.NEXT_PUBLIC_SITE_ABOUT} data-public-text-generation-timeout-ms={process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS} > - <Topbar /> {children}
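The `isFromStartNode` helper added above walks backwards through `getBeforeNodeById` with unbounded recursion and no visited set, so a workflow graph that ever contained a cycle would recurse until the stack overflows. A minimal cycle-safe sketch of the same reachability idea; the `WorkflowNode` type and `getPrevNodes` callback here are illustrative stand-ins, not the hook's real API:

```ts
// Cycle-safe "is this node downstream of Start?" check.
type WorkflowNode = { id: string; data: { type: string } }

function isReachableFromStart(
  nodeId: string,
  nodes: WorkflowNode[],
  getPrevNodes: (id: string) => WorkflowNode[], // stand-in for getBeforeNodeById
): boolean {
  const start = nodes.find(n => n.data.type === 'start')
  if (!start)
    return false
  const visited = new Set<string>() // guards against cycles in the graph
  const walk = (id: string): boolean => {
    if (id === start.id)
      return true
    if (visited.has(id))
      return false
    visited.add(id)
    return getPrevNodes(id).some(prev => walk(prev.id))
  }
  return walk(nodeId)
}
```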
diff --git a/web/app/signin/normalForm.tsx b/web/app/signin/normalForm.tsx index 816df8007d..0ae4eb1f43 100644 --- a/web/app/signin/normalForm.tsx +++ b/web/app/signin/normalForm.tsx @@ -11,6 +11,7 @@ import { IS_CE_EDITION, SUPPORT_MAIL_LOGIN, apiPrefix, emailRegex } from '@/conf import Button from '@/app/components/base/button' import { login, oauth } from '@/service/common' import { getPurifyHref } from '@/utils' +import useRefreshToken from '@/hooks/use-refresh-token' type IState = { formValid: boolean @@ -61,6 +62,7 @@ function reducer(state: IState, action: IAction) { const NormalForm = () => { const { t } = useTranslation() + const { getNewAccessToken } = useRefreshToken() const useEmailLogin = IS_CE_EDITION || SUPPORT_MAIL_LOGIN const router = useRouter() @@ -95,7 +97,9 @@ const NormalForm = () => { }, }) if (res.result === 'success') { - localStorage.setItem('console_token', res.data) + localStorage.setItem('console_token', res.data.access_token) + localStorage.setItem('refresh_token', res.data.refresh_token) + getNewAccessToken(res.data.access_token, res.data.refresh_token) router.replace('/apps') } else { diff --git a/web/app/signin/userSSOForm.tsx b/web/app/signin/userSSOForm.tsx index 9cd889a0a5..e4b61413bc 100644 --- a/web/app/signin/userSSOForm.tsx +++ b/web/app/signin/userSSOForm.tsx @@ -7,6 +7,7 @@ import cn from '@/utils/classnames' import Toast from '@/app/components/base/toast' import { getUserOAuth2SSOUrl, getUserOIDCSSOUrl, getUserSAMLSSOUrl } from '@/service/sso' import Button from '@/app/components/base/button' +import useRefreshToken from '@/hooks/use-refresh-token' type UserSSOFormProps = { protocol: string @@ -15,8 +16,10 @@ const UserSSOForm: FC<UserSSOFormProps> = ({ protocol, }) => { + const { getNewAccessToken } = useRefreshToken() const searchParams = useSearchParams() - const consoleToken = searchParams.get('console_token') + const consoleToken = searchParams.get('access_token') + const refreshToken = searchParams.get('refresh_token') const message = searchParams.get('message') const router = useRouter() @@ -25,8 +28,10 @@ const UserSSOForm: FC<UserSSOFormProps> = ({ const [isLoading, setIsLoading] = useState(false) useEffect(() => { - if (consoleToken) { + if (refreshToken && consoleToken) { localStorage.setItem('console_token', consoleToken) + localStorage.setItem('refresh_token', refreshToken) + getNewAccessToken(consoleToken, refreshToken) router.replace('/apps') } @@ -36,7 +41,7 @@ const UserSSOForm: FC<UserSSOFormProps> = ({ message, }) } - }, []) + }, [consoleToken, refreshToken, message, router]) const handleSSOLogin = () => { setIsLoading(true) diff --git a/web/docker/entrypoint.sh b/web/docker/entrypoint.sh index fc4a8f45bc..ad4b17a476 100755 --- a/web/docker/entrypoint.sh +++ b/web/docker/entrypoint.sh @@ -22,5 +22,6 @@ export NEXT_PUBLIC_SITE_ABOUT=${SITE_ABOUT} export NEXT_TELEMETRY_DISABLED=${NEXT_TELEMETRY_DISABLED} export NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS=${TEXT_GENERATION_TIMEOUT_MS} +export NEXT_PUBLIC_CSP_WHITELIST=${CSP_WHITELIST} pm2 start ./pm2.json --no-daemon diff --git a/web/hooks/use-app-favicon.ts b/web/hooks/use-app-favicon.ts index 1ff743928f..e8a0173371 100644 --- a/web/hooks/use-app-favicon.ts +++ b/web/hooks/use-app-favicon.ts @@ -21,7 +21,7 @@ export function useAppFavicon(options: UseAppFaviconOptions) { } = options useAsyncEffect(async () => { - if (!enable) + if (!enable || (icon_type === 'image' && !icon_url) || (icon_type === 'emoji' && !icon)) return const isValidImageIcon = icon_type === 'image' && icon_url
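Both sign-in paths above now end with the same three steps: persist the access token, persist the refresh token, then hand the pair to `getNewAccessToken` so the refresh cycle starts. A hypothetical shared helper (not part of this diff) would make that sequence explicit:

```ts
// Illustrative only: the PR inlines these steps in normalForm.tsx and userSSOForm.tsx.
const persistTokensAndStartRefresh = (
  accessToken: string,
  refreshToken: string,
  getNewAccessToken: (token: string, refresh: string) => Promise<unknown>,
) => {
  localStorage.setItem('console_token', accessToken) // read by the API client
  localStorage.setItem('refresh_token', refreshToken) // read by useRefreshToken
  return getNewAccessToken(accessToken, refreshToken) // kicks off the refresh timer
}
```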
diff --git a/web/hooks/use-refresh-token.ts b/web/hooks/use-refresh-token.ts new file mode 100644 index 0000000000..3d8779636f --- /dev/null +++ b/web/hooks/use-refresh-token.ts @@ -0,0 +1,92 @@ +'use client' +import { useCallback, useEffect, useRef } from 'react' +import { jwtDecode } from 'jwt-decode' +import dayjs from 'dayjs' +import utc from 'dayjs/plugin/utc' +import { useRouter } from 'next/navigation' +import type { CommonResponse } from '@/models/common' +import { fetchNewToken } from '@/service/common' +import { fetchWithRetry } from '@/utils' + +dayjs.extend(utc) + +const useRefreshToken = () => { + const router = useRouter() + const timer = useRef<NodeJS.Timeout>() + const advanceTime = useRef(5 * 60 * 1000) + const interval = useRef(55 * 60 * 1000) + + const getExpireTime = useCallback((token: string) => { + if (!token) + return 0 + const decoded = jwtDecode(token) + return (decoded.exp || 0) * 1000 + }, []) + + const getCurrentTimeStamp = useCallback(() => { + return dayjs.utc().valueOf() + }, []) + + const handleError = useCallback(() => { + localStorage?.removeItem('is_refreshing') + localStorage?.removeItem('console_token') + localStorage?.removeItem('refresh_token') + localStorage?.removeItem('last_refresh_time') + router.replace('/signin') + }, []) + + const getNewAccessToken = useCallback(async (currentAccessToken: string, currentRefreshToken: string) => { + if (localStorage?.getItem('is_refreshing') === '1') + return null + const currentTokenExpireTime = getExpireTime(currentAccessToken) + let lastRefreshTime = parseInt(localStorage?.getItem('last_refresh_time') || '0') + lastRefreshTime = isNaN(lastRefreshTime) ? 0 : lastRefreshTime + if (getCurrentTimeStamp() + advanceTime.current > currentTokenExpireTime + && lastRefreshTime + interval.current < getCurrentTimeStamp()) { + localStorage?.setItem('is_refreshing', '1') + const [e, res] = await fetchWithRetry(fetchNewToken({ + body: { refresh_token: currentRefreshToken }, + }) as Promise<CommonResponse & { data: { access_token: string; refresh_token: string } }>) + if (e) { + handleError() + return e + } + const { access_token, refresh_token } = res.data + localStorage?.setItem('is_refreshing', '0') + localStorage?.setItem('last_refresh_time', getCurrentTimeStamp().toString()) + localStorage?.setItem('console_token', access_token) + localStorage?.setItem('refresh_token', refresh_token) + const newTokenExpireTime = getExpireTime(access_token) + timer.current = setTimeout(() => { + const consoleTokenFromLocalStorage = localStorage?.getItem('console_token') + const refreshTokenFromLocalStorage = localStorage?.getItem('refresh_token') + if (consoleTokenFromLocalStorage && refreshTokenFromLocalStorage) + getNewAccessToken(consoleTokenFromLocalStorage, refreshTokenFromLocalStorage) + }, newTokenExpireTime - advanceTime.current - getCurrentTimeStamp()) + } + else { + const newTokenExpireTime = getExpireTime(currentAccessToken) + timer.current = setTimeout(() => { + const consoleTokenFromLocalStorage = localStorage?.getItem('console_token') + const refreshTokenFromLocalStorage = localStorage?.getItem('refresh_token') + if (consoleTokenFromLocalStorage && refreshTokenFromLocalStorage) + getNewAccessToken(consoleTokenFromLocalStorage, refreshTokenFromLocalStorage) + }, newTokenExpireTime - advanceTime.current - getCurrentTimeStamp()) + } + return null + }, [getExpireTime, getCurrentTimeStamp, handleError]) + + useEffect(() => { + return () => { + clearTimeout(timer.current) + localStorage?.removeItem('is_refreshing') + localStorage?.removeItem('last_refresh_time') + } + }, []) + + return { + getNewAccessToken, + } +} + +export default useRefreshToken
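The hook's refresh condition is easy to miss in the diff: a refresh only fires when the token is within `advanceTime` (5 minutes) of expiry *and* the last refresh is older than `interval` (55 minutes); otherwise it just re-arms a timer for `exp - advanceTime - now`. A standalone sketch of that arithmetic with the hook's defaults (the helper names are illustrative, not exports of this PR):

```ts
import { jwtDecode } from 'jwt-decode'

const ADVANCE_MS = 5 * 60 * 1000 // refresh this long before the JWT `exp`
const MIN_GAP_MS = 55 * 60 * 1000 // throttle: at most one refresh per 55 minutes

function shouldRefreshNow(accessToken: string, lastRefreshAt: number, now = Date.now()): boolean {
  const expMs = (jwtDecode(accessToken).exp || 0) * 1000
  return now + ADVANCE_MS > expMs && lastRefreshAt + MIN_GAP_MS < now
}

function nextRefreshDelayMs(accessToken: string, now = Date.now()): number {
  const expMs = (jwtDecode(accessToken).exp || 0) * 1000
  // The hook passes this value straight to setTimeout; clamping at 0 avoids a
  // negative delay when the token is already inside the refresh window.
  return Math.max(expMs - ADVANCE_MS - now, 0)
}
```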
diff --git a/web/i18n/de-DE/common.ts b/web/i18n/de-DE/common.ts index 6ea06bc8b1..8b221ca3bb 100644 --- a/web/i18n/de-DE/common.ts +++ b/web/i18n/de-DE/common.ts @@ -167,6 +167,9 @@ const translation = { delete: 'Konto löschen', deleteTip: 'Wenn Sie Ihr Konto löschen, werden alle Ihre Daten dauerhaft gelöscht und können nicht wiederhergestellt werden.', deleteConfirmTip: 'Zur Bestätigung senden Sie bitte Folgendes von Ihrer registrierten E-Mail-Adresse an ', + myAccount: 'Mein Konto', + studio: 'Dify Studio', + account: 'Konto', }, members: { team: 'Team', diff --git a/web/i18n/de-DE/workflow.ts b/web/i18n/de-DE/workflow.ts index c01d0e6f99..b6d0e8cde4 100644 --- a/web/i18n/de-DE/workflow.ts +++ b/web/i18n/de-DE/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Neusortierungsmodell', }, invalidVariable: 'Ungültige Variable', + rerankModelRequired: 'Bevor Sie das Rerank-Modell aktivieren, bestätigen Sie bitte, dass das Modell in den Einstellungen erfolgreich konfiguriert wurde.', }, singleRun: { testRun: 'Testlauf ', diff --git a/web/i18n/en-US/common.ts b/web/i18n/en-US/common.ts index 098a3eeafb..5b82ecf8be 100644 --- a/web/i18n/en-US/common.ts +++ b/web/i18n/en-US/common.ts @@ -144,7 +144,7 @@ const translation = { logout: 'Log out', }, settings: { - accountGroup: 'ACCOUNT', + accountGroup: 'GENERAL', workplaceGroup: 'WORKSPACE', account: 'My account', members: 'Members', @@ -157,6 +157,9 @@ apiBasedExtension: 'API Extension', }, account: { + account: 'Account', + myAccount: 'My Account', + studio: 'Dify Studio', avatar: 'Avatar', name: 'Name', email: 'Email', diff --git a/web/i18n/en-US/workflow.ts b/web/i18n/en-US/workflow.ts index af2e4e8c0a..2c1109a486 100644 --- a/web/i18n/en-US/workflow.ts +++ b/web/i18n/en-US/workflow.ts @@ -173,6 +173,7 @@ const translation = { }, errorMsg: { fieldRequired: '{{field}} is required', + rerankModelRequired: 'Before turning on the Rerank Model, please confirm that the model has been successfully configured in the settings.', authRequired: 'Authorization is required', invalidJson: '{{field}} is invalid JSON', fields: { diff --git a/web/i18n/es-ES/common.ts b/web/i18n/es-ES/common.ts index 59a05f63d8..748c9d152d 100644 --- a/web/i18n/es-ES/common.ts +++ b/web/i18n/es-ES/common.ts @@ -171,6 +171,9 @@ const translation = { delete: 'Eliminar cuenta', deleteTip: 'Eliminar tu cuenta borrará permanentemente todos tus datos y no se podrán recuperar.', deleteConfirmTip: 'Para confirmar, por favor envía lo siguiente desde tu correo electrónico registrado a ', + account: 'Cuenta', + myAccount: 'Mi Cuenta', + studio: 'Estudio Dify', }, members: { team: 'Equipo', diff --git a/web/i18n/es-ES/workflow.ts b/web/i18n/es-ES/workflow.ts index 2260631d0f..275149a056 100644 --- a/web/i18n/es-ES/workflow.ts +++ b/web/i18n/es-ES/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Modelo de reordenamiento', }, invalidVariable: 'Variable no válida', + rerankModelRequired: 'Antes de activar el modelo de reclasificación, confirme que el modelo se ha configurado correctamente en la configuración.', }, singleRun: { testRun: 'Ejecución de prueba', diff --git a/web/i18n/fa-IR/common.ts b/web/i18n/fa-IR/common.ts index c75ab11a63..a369a0ba5e 100644 --- a/web/i18n/fa-IR/common.ts +++ b/web/i18n/fa-IR/common.ts @@ -171,6 +171,9 @@ const translation = { delete: 'حذف حساب کاربری', deleteTip: 'حذف حساب کاربری شما تمام داده‌های شما را به طور دائمی پاک می‌کند و قابل بازیابی نیست.', deleteConfirmTip: 'برای تأیید، لطفاً موارد زیر را از ایمیل ثبت‌نام شده خود به این آدرس 
ارسال کنید ', + account: 'حساب', + myAccount: 'حساب من', + studio: 'استودیو Dify', }, members: { team: 'تیم', diff --git a/web/i18n/fa-IR/workflow.ts b/web/i18n/fa-IR/workflow.ts index eb36dfdc88..609f446b43 100644 --- a/web/i18n/fa-IR/workflow.ts +++ b/web/i18n/fa-IR/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'مدل مجدد رتبه‌بندی', }, invalidVariable: 'متغیر نامعتبر', + rerankModelRequired: 'قبل از روشن کردن Rerank Model، لطفا تأیید کنید که مدل با موفقیت در تنظیمات پیکربندی شده است.', }, singleRun: { testRun: 'اجرای آزمایشی', diff --git a/web/i18n/fr-FR/common.ts b/web/i18n/fr-FR/common.ts index c4fed4405d..0cd301aed2 100644 --- a/web/i18n/fr-FR/common.ts +++ b/web/i18n/fr-FR/common.ts @@ -167,6 +167,9 @@ const translation = { delete: 'Supprimer le compte', deleteTip: 'La suppression de votre compte effacera définitivement toutes vos données et elles ne pourront pas être récupérées.', deleteConfirmTip: 'Pour confirmer, veuillez envoyer ce qui suit depuis votre adresse e-mail enregistrée à ', + myAccount: 'Mon compte', + account: 'Compte', + studio: 'Dify Studio', }, members: { team: 'Équipe', diff --git a/web/i18n/fr-FR/workflow.ts b/web/i18n/fr-FR/workflow.ts index 878d25804e..068c41b853 100644 --- a/web/i18n/fr-FR/workflow.ts +++ b/web/i18n/fr-FR/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Modèle de rerank', }, invalidVariable: 'Variable invalide', + rerankModelRequired: 'Avant d’activer le modèle de reclassement, veuillez confirmer que le modèle a été correctement configuré dans les paramètres.', }, singleRun: { testRun: 'Exécution de test', diff --git a/web/i18n/hi-IN/common.ts b/web/i18n/hi-IN/common.ts index 256cb9d426..224090437e 100644 --- a/web/i18n/hi-IN/common.ts +++ b/web/i18n/hi-IN/common.ts @@ -177,6 +177,9 @@ const translation = { deleteConfirmTip: 'पुष्टि करने के लिए, कृपया अपने पंजीकृत ईमेल से निम्नलिखित भेजें', delete: 'खाता हटाएं', deleteTip: 'अपना खाता हटाने से आपका सारा डेटा स्थायी रूप से मिट जाएगा और इसे पुनर्प्राप्त नहीं किया जा सकता है।', + account: 'खाता', + studio: 'डिफाई स्टूडियो', + myAccount: 'मेरा खाता', }, members: { team: 'टीम', diff --git a/web/i18n/hi-IN/workflow.ts b/web/i18n/hi-IN/workflow.ts index ac356c2067..e402200462 100644 --- a/web/i18n/hi-IN/workflow.ts +++ b/web/i18n/hi-IN/workflow.ts @@ -185,6 +185,7 @@ const translation = { rerankModel: 'पुनः रैंक मॉडल', }, invalidVariable: 'अमान्य वेरिएबल', + rerankModelRequired: 'Rerank मॉडल चालू करने से पहले, कृपया पुष्टि करें कि मॉडल को सेटिंग्स में सफलतापूर्वक कॉन्फ़िगर किया गया है।', }, singleRun: { testRun: 'परीक्षण रन', diff --git a/web/i18n/it-IT/common.ts b/web/i18n/it-IT/common.ts index aa675bb471..5c180a8b69 100644 --- a/web/i18n/it-IT/common.ts +++ b/web/i18n/it-IT/common.ts @@ -179,6 +179,9 @@ const translation = { 'Eliminando il tuo account cancellerai permanentemente tutti i tuoi dati e non sarà possibile recuperarli.', deleteConfirmTip: 'Per confermare, invia il seguente messaggio dalla tua email registrata a ', + myAccount: 'Il mio account', + account: 'Account', + studio: 'Dify Studio', }, members: { team: 'Team', diff --git a/web/i18n/it-IT/workflow.ts b/web/i18n/it-IT/workflow.ts index 0427a45cd9..ce460ed252 100644 --- a/web/i18n/it-IT/workflow.ts +++ b/web/i18n/it-IT/workflow.ts @@ -187,6 +187,7 @@ const translation = { rerankModel: 'Modello Rerank', }, invalidVariable: 'Variabile non valida', + rerankModelRequired: 'Prima di attivare il modello di reranking, conferma che il modello è stato configurato correttamente nelle impostazioni.', }, 
singleRun: { testRun: 'Esecuzione Test ', diff --git a/web/i18n/ja-JP/common.ts b/web/i18n/ja-JP/common.ts index e2517a619d..bd50e68b09 100644 --- a/web/i18n/ja-JP/common.ts +++ b/web/i18n/ja-JP/common.ts @@ -171,6 +171,9 @@ const translation = { delete: 'アカウントを削除', deleteTip: 'アカウントを削除すると、すべてのデータが完全に消去され、復元できなくなります。', deleteConfirmTip: '確認のため、登録したメールから次の内容をに送信してください ', + account: 'アカウント', + myAccount: 'マイアカウント', + studio: 'Difyスタジオ', }, members: { team: 'チーム', diff --git a/web/i18n/ja-JP/workflow.ts b/web/i18n/ja-JP/workflow.ts index 48c2019601..2906f7ef8c 100644 --- a/web/i18n/ja-JP/workflow.ts +++ b/web/i18n/ja-JP/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Rerankモデル', }, invalidVariable: '無効な変数', + rerankModelRequired: 'モデルの再ランク付けをオンにする前に、モデルが設定で正常に構成されていることを確認してください。', }, singleRun: { testRun: 'テスト実行', diff --git a/web/i18n/ko-KR/common.ts b/web/i18n/ko-KR/common.ts index 8ef55da3f7..d70b7ebb10 100644 --- a/web/i18n/ko-KR/common.ts +++ b/web/i18n/ko-KR/common.ts @@ -163,6 +163,9 @@ const translation = { delete: '계정 삭제', deleteTip: '계정을 삭제하면 모든 데이터가 영구적으로 지워지며 복구할 수 없습니다.', deleteConfirmTip: '확인하려면 등록된 이메일에서 다음 내용을 로 보내주세요 ', + myAccount: '내 계정', + studio: '디파이 스튜디오', + account: '계정', }, members: { team: '팀', diff --git a/web/i18n/ko-KR/workflow.ts b/web/i18n/ko-KR/workflow.ts index 4a97943790..99d5c47c0b 100644 --- a/web/i18n/ko-KR/workflow.ts +++ b/web/i18n/ko-KR/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: '재정렬 모델', }, invalidVariable: '잘못된 변수', + rerankModelRequired: 'Rerank Model을 켜기 전에 설정에서 모델이 성공적으로 구성되었는지 확인하십시오.', }, singleRun: { testRun: '테스트 실행', diff --git a/web/i18n/pl-PL/common.ts b/web/i18n/pl-PL/common.ts index 91f5fb2899..b070678785 100644 --- a/web/i18n/pl-PL/common.ts +++ b/web/i18n/pl-PL/common.ts @@ -173,6 +173,9 @@ const translation = { delete: 'Usuń konto', deleteTip: 'Usunięcie konta spowoduje trwałe usunięcie wszystkich danych i nie będzie można ich odzyskać.', deleteConfirmTip: 'Aby potwierdzić, wyślij następujące informacje z zarejestrowanego adresu e-mail na adres ', + myAccount: 'Moje konto', + studio: 'Dify Studio', + account: 'Konto', }, members: { team: 'Zespół', diff --git a/web/i18n/pl-PL/workflow.ts b/web/i18n/pl-PL/workflow.ts index 41927668f7..b26c429fb1 100644 --- a/web/i18n/pl-PL/workflow.ts +++ b/web/i18n/pl-PL/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Model rerank', }, invalidVariable: 'Nieprawidłowa zmienna', + rerankModelRequired: 'Przed włączeniem Rerank Model upewnij się, że model został pomyślnie skonfigurowany w ustawieniach.', }, singleRun: { testRun: 'Testowe uruchomienie ', diff --git a/web/i18n/pt-BR/common.ts b/web/i18n/pt-BR/common.ts index f9e9eb7888..9343fdf560 100644 --- a/web/i18n/pt-BR/common.ts +++ b/web/i18n/pt-BR/common.ts @@ -167,6 +167,9 @@ const translation = { delete: 'Excluir conta', deleteTip: 'Excluir sua conta apagará permanentemente todos os seus dados e eles não poderão ser recuperados.', deleteConfirmTip: 'Para confirmar, envie o seguinte do seu e-mail registrado para ', + myAccount: 'Minha Conta', + account: 'Conta', + studio: 'Estúdio Dify', }, members: { team: 'Equipe', diff --git a/web/i18n/pt-BR/workflow.ts b/web/i18n/pt-BR/workflow.ts index 222fc788bf..9092ccda3e 100644 --- a/web/i18n/pt-BR/workflow.ts +++ b/web/i18n/pt-BR/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Modelo de reordenação', }, invalidVariable: 'Variável inválida', + rerankModelRequired: 'Antes de ativar o modelo de reclassificação, confirme se o 
modelo foi configurado com sucesso nas configurações.', }, singleRun: { testRun: 'Execução de teste ', diff --git a/web/i18n/ro-RO/common.ts b/web/i18n/ro-RO/common.ts index 1fd8778106..dc3bfcc45a 100644 --- a/web/i18n/ro-RO/common.ts +++ b/web/i18n/ro-RO/common.ts @@ -167,6 +167,9 @@ const translation = { delete: 'Șterge contul', deleteTip: 'Ștergerea contului vă va șterge definitiv toate datele și nu pot fi recuperate.', deleteConfirmTip: 'Pentru a confirma, trimiteți următoarele din e-mailul înregistrat la ', + account: 'Cont', + studio: 'Dify Studio', + myAccount: 'Contul meu', }, members: { team: 'Echipă', diff --git a/web/i18n/ro-RO/workflow.ts b/web/i18n/ro-RO/workflow.ts index ac4b718b07..bb66169da8 100644 --- a/web/i18n/ro-RO/workflow.ts +++ b/web/i18n/ro-RO/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Model de rerankare', }, invalidVariable: 'Variabilă invalidă', + rerankModelRequired: 'Înainte de a activa modelul de reclasificare, vă rugăm să confirmați că modelul a fost configurat cu succes în setări.', }, singleRun: { testRun: 'Rulare de test ', diff --git a/web/i18n/ru-RU/common.ts b/web/i18n/ru-RU/common.ts index 82e3471e60..a829fb27b1 100644 --- a/web/i18n/ru-RU/common.ts +++ b/web/i18n/ru-RU/common.ts @@ -171,6 +171,9 @@ const translation = { delete: 'Удалить учетную запись', deleteTip: 'Удаление вашей учетной записи приведет к безвозвратному удалению всех ваших данных, и их невозможно будет восстановить.', deleteConfirmTip: 'Для подтверждения, пожалуйста, отправьте следующее с вашего зарегистрированного адреса электронной почты на ', + account: 'Аккаунт', + studio: 'Студия Dify', + myAccount: 'Моя учетная запись', }, members: { team: 'Команда', diff --git a/web/i18n/ru-RU/workflow.ts b/web/i18n/ru-RU/workflow.ts index 1931863895..5b2bc7e290 100644 --- a/web/i18n/ru-RU/workflow.ts +++ b/web/i18n/ru-RU/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Модель переранжирования', }, invalidVariable: 'Неверная переменная', + rerankModelRequired: 'Перед включением модели повторного ранжирования убедитесь, что модель успешно настроена в настройках.', }, singleRun: { testRun: 'Тестовый запуск ', diff --git a/web/i18n/tr-TR/common.ts b/web/i18n/tr-TR/common.ts index a41925cd20..dc4b1cccba 100644 --- a/web/i18n/tr-TR/common.ts +++ b/web/i18n/tr-TR/common.ts @@ -171,6 +171,9 @@ const translation = { delete: 'Hesabı Sil', deleteTip: 'Hesabınızı silmek tüm verilerinizi kalıcı olarak siler ve geri alınamaz.', deleteConfirmTip: 'Onaylamak için, kayıtlı e-postanızdan şu adrese e-posta gönderin: ', + account: 'Hesap', + myAccount: 'Hesabım', + studio: 'Dify Stüdyo', }, members: { team: 'Takım', diff --git a/web/i18n/tr-TR/workflow.ts b/web/i18n/tr-TR/workflow.ts index a33a3724ad..8e1ce59630 100644 --- a/web/i18n/tr-TR/workflow.ts +++ b/web/i18n/tr-TR/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Yeniden Sıralama Modeli', }, invalidVariable: 'Geçersiz değişken', + rerankModelRequired: 'Yeniden Sıralama Modelini açmadan önce, lütfen ayarlarda modelin başarıyla yapılandırıldığını onaylayın.', }, singleRun: { testRun: 'Test Çalıştırma', diff --git a/web/i18n/uk-UA/common.ts b/web/i18n/uk-UA/common.ts index cc70772be3..ef0bc55203 100644 --- a/web/i18n/uk-UA/common.ts +++ b/web/i18n/uk-UA/common.ts @@ -167,6 +167,9 @@ const translation = { delete: 'Видалити обліковий запис', deleteTip: 'Видалення вашого облікового запису призведе до остаточного видалення всіх ваших даних, і їх неможливо буде відновити.', deleteConfirmTip: 'Щоб 
підтвердити, будь ласка, надішліть наступне з вашої зареєстрованої електронної пошти на ', + account: 'Обліковий запис', + studio: 'Студія Dify', + myAccount: 'Особистий кабінет', }, members: { team: 'Команда', diff --git a/web/i18n/uk-UA/workflow.ts b/web/i18n/uk-UA/workflow.ts index e1bea99bcd..f7747541cc 100644 --- a/web/i18n/uk-UA/workflow.ts +++ b/web/i18n/uk-UA/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Модель повторного ранжування', }, invalidVariable: 'Недійсна змінна', + rerankModelRequired: 'Перед увімкненням Rerank Model, будь ласка, підтвердьте, що модель успішно налаштована в налаштуваннях.', }, singleRun: { testRun: 'Тестовий запуск', diff --git a/web/i18n/vi-VN/common.ts b/web/i18n/vi-VN/common.ts index 252fa7e1df..5336ec4f66 100644 --- a/web/i18n/vi-VN/common.ts +++ b/web/i18n/vi-VN/common.ts @@ -167,6 +167,9 @@ const translation = { delete: 'Xóa tài khoản', deleteTip: 'Xóa tài khoản của bạn sẽ xóa vĩnh viễn tất cả dữ liệu của bạn và không thể khôi phục được.', deleteConfirmTip: 'Để xác nhận, vui lòng gửi thông tin sau từ email đã đăng ký của bạn tới ', + studio: 'Dify Studio', + myAccount: 'Tài khoản của tôi', + account: 'Tài khoản', }, members: { team: 'Nhóm', diff --git a/web/i18n/vi-VN/workflow.ts b/web/i18n/vi-VN/workflow.ts index 5e6f0e0063..aa9fbf865d 100644 --- a/web/i18n/vi-VN/workflow.ts +++ b/web/i18n/vi-VN/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Mô hình xếp hạng lại', }, invalidVariable: 'Biến không hợp lệ', + rerankModelRequired: 'Trước khi bật Mô hình xếp hạng lại, vui lòng xác nhận rằng mô hình đã được định cấu hình thành công trong cài đặt.', }, singleRun: { testRun: 'Chạy thử nghiệm ', diff --git a/web/i18n/zh-Hans/common.ts b/web/i18n/zh-Hans/common.ts index 8140e6b215..21e69e666d 100644 --- a/web/i18n/zh-Hans/common.ts +++ b/web/i18n/zh-Hans/common.ts @@ -144,7 +144,7 @@ const translation = { logout: '登出', }, settings: { - accountGroup: '账户', + accountGroup: '通用', workplaceGroup: '工作空间', account: '我的账户', members: '成员', @@ -157,6 +157,9 @@ apiBasedExtension: 'API 扩展', }, account: { + account: '账户', + myAccount: '我的账户', + studio: 'Dify 工作室', avatar: '头像', name: '用户名', email: '邮箱', diff --git a/web/i18n/zh-Hans/workflow.ts b/web/i18n/zh-Hans/workflow.ts index bcf6cbebff..dc97fc76f8 100644 --- a/web/i18n/zh-Hans/workflow.ts +++ b/web/i18n/zh-Hans/workflow.ts @@ -173,6 +173,7 @@ const translation = { }, errorMsg: { fieldRequired: '{{field}} 不能为空', + rerankModelRequired: '开启 Rerank 模型前,请务必确认模型已在设置中成功配置。', authRequired: '请先授权', invalidJson: '{{field}} 是非法的 JSON', fields: { diff --git a/web/i18n/zh-Hant/common.ts b/web/i18n/zh-Hant/common.ts index 8cd51b1991..e43b49bd3c 100644 --- a/web/i18n/zh-Hant/common.ts +++ b/web/i18n/zh-Hant/common.ts @@ -167,6 +167,9 @@ const translation = { delete: '刪除帳戶', deleteTip: '刪除您的帳戶將永久刪除您的所有資料並且無法恢復。', deleteConfirmTip: '請將以下內容從您的註冊電子郵件發送至 ', + account: '帳戶', + myAccount: '我的帳戶', + studio: 'Dify 工作室', }, members: { team: '團隊', diff --git a/web/i18n/zh-Hant/workflow.ts b/web/i18n/zh-Hant/workflow.ts index 8e1b7529fe..35ed68c437 100644 --- a/web/i18n/zh-Hant/workflow.ts +++ b/web/i18n/zh-Hant/workflow.ts @@ -182,6 +182,7 @@ const translation = { rerankModel: 'Rerank 模型', }, invalidVariable: '無效的變量', + rerankModelRequired: '在開啟 Rerank 模型之前,請在設置中確認模型配置成功。', }, singleRun: { testRun: '測試運行', diff --git a/web/middleware.ts b/web/middleware.ts new file mode 100644 index 0000000000..e0f8f3782f --- /dev/null +++ b/web/middleware.ts @@ -0,0 +1,76 @@ +import type { 
NextRequest } from 'next/server' +import { NextResponse } from 'next/server' + +const NECESSARY_DOMAIN = '*.sentry.io http://localhost:* http://127.0.0.1:* https://analytics.google.com googletagmanager.com *.googletagmanager.com https://www.google-analytics.com https://api.github.com' + +export function middleware(request: NextRequest) { + const isWhiteListEnabled = !!process.env.NEXT_PUBLIC_CSP_WHITELIST && process.env.NODE_ENV === 'production' + if (!isWhiteListEnabled) + return NextResponse.next() + + const whiteList = `${process.env.NEXT_PUBLIC_CSP_WHITELIST} ${NECESSARY_DOMAIN}` + const nonce = Buffer.from(crypto.randomUUID()).toString('base64') + const csp = `'nonce-${nonce}'` + + const scheme_source = 'data: mediastream: blob: filesystem:' + + const cspHeader = ` + default-src 'self' ${scheme_source} ${csp} ${whiteList}; + connect-src 'self' ${scheme_source} ${csp} ${whiteList}; + script-src 'self' ${scheme_source} ${csp} ${whiteList}; + style-src 'self' 'unsafe-inline' ${scheme_source} ${whiteList}; + worker-src 'self' ${scheme_source} ${csp} ${whiteList}; + media-src 'self' ${scheme_source} ${csp} ${whiteList}; + img-src 'self' ${scheme_source} ${csp} ${whiteList}; + font-src 'self'; + object-src 'none'; + base-uri 'self'; + form-action 'self'; + upgrade-insecure-requests; +` + // Replace newline characters and spaces + const contentSecurityPolicyHeaderValue = cspHeader + .replace(/\s{2,}/g, ' ') + .trim() + + const requestHeaders = new Headers(request.headers) + requestHeaders.set('x-nonce', nonce) + + requestHeaders.set( + 'Content-Security-Policy', + contentSecurityPolicyHeaderValue, + ) + + const response = NextResponse.next({ + request: { + headers: requestHeaders, + }, + }) + response.headers.set( + 'Content-Security-Policy', + contentSecurityPolicyHeaderValue, + ) + + return response +} + +export const config = { + matcher: [ + /* + * Match all request paths except for the ones starting with: + * - api (API routes) + * - _next/static (static files) + * - _next/image (image optimization files) + * - favicon.ico (favicon file) + */ + { + // source: '/((?!api|_next/static|_next/image|favicon.ico).*)', + source: '/((?!_next/static|_next/image|favicon.ico).*)', + // source: '/(.*)', + // missing: [ + // { type: 'header', key: 'next-router-prefetch' }, + // { type: 'header', key: 'purpose', value: 'prefetch' }, + // ], + }, + ], +}
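The middleware exposes the per-request nonce in two places: the `Content-Security-Policy` response header and an `x-nonce` request header. Downstream server components can read the latter and attach it to scripts, which is the standard Next.js CSP pattern; a sketch of a hypothetical consumer (not part of this diff):

```tsx
// app/components/analytics.tsx — illustrative consumer of the middleware's nonce
import { headers } from 'next/headers'
import Script from 'next/script'

export default function Analytics() {
  // Set by the middleware via requestHeaders.set('x-nonce', nonce)
  const nonce = headers().get('x-nonce') ?? undefined
  // Scripts not covered by the configured whitelist must carry the nonce
  // to satisfy the 'nonce-…' source in script-src.
  return <Script src="https://example.com/widget.js" nonce={nonce} />
}
```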
diff --git a/web/package.json b/web/package.json index d9534b22d3..73ac76716a 100644 --- a/web/package.json +++ b/web/package.json @@ -55,6 +55,7 @@ "immer": "^9.0.19", "js-audio-recorder": "^1.0.7", "js-cookie": "^3.0.1", + "jwt-decode": "^4.0.0", "katex": "^0.16.10", "lamejs": "^1.2.1", "lexical": "^0.16.0", @@ -63,7 +64,6 @@ "mime": "^4.0.4", "negotiator": "^0.6.3", "next": "^14.1.1", - "next-nprogress-bar": "^2.3.8", "pinyin-pro": "^3.23.0", "qrcode.react": "^3.1.0", "qs": "^6.11.1", diff --git a/web/service/common.ts b/web/service/common.ts index 859c8e4453..b4b7ee1046 100644 --- a/web/service/common.ts +++ b/web/service/common.ts @@ -38,8 +38,21 @@ import type { import type { RETRIEVE_METHOD } from '@/types/app' import type { SystemFeatures } from '@/types/feature' -export const login: Fetcher<CommonResponse & { data: string }, { url: string; body: Record<string, any> }> = ({ url, body }) => { - return post(url, { body }) as Promise<CommonResponse & { data: string }> +type LoginSuccess = { + result: 'success' + data: { access_token: string; refresh_token: string } } +type LoginFail = { + result: 'fail' + data: string +} +type LoginResponse = LoginSuccess | LoginFail +export const login: Fetcher<LoginResponse, { url: string; body: Record<string, any> }> = ({ url, body }) => { + return post(url, { body }) as Promise<LoginResponse> +} + +export const fetchNewToken: Fetcher<CommonResponse & { data: { access_token: string; refresh_token: string } }, { body: Record<string, any> }> = ({ body }) => { + return post('/refresh-token', { body }) as Promise<CommonResponse & { data: { access_token: string; refresh_token: string } }> +} export const setup: Fetcher<CommonResponse, { body: Record<string, any> }> = ({ body }) => { diff --git a/web/utils/index.ts b/web/utils/index.ts index 8afd8afae7..7aa6fef0a8 100644 --- a/web/utils/index.ts +++ b/web/utils/index.ts @@ -39,3 +39,21 @@ export const getPurifyHref = (href: string) => { return escape(href) } + +export async function fetchWithRetry<T>(fn: Promise<T>, retries = 3): Promise<[Error] | [null, T]> { + const [error, res] = await asyncRunSafe(fn) + if (error) { + if (retries > 0) { + const res = await fetchWithRetry(fn, retries - 1) + return res + } + else { + if (error instanceof Error) + return [error] + return [new Error('unknown error')] + } + } + else { + return [null, res] + } +}
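One caveat on `fetchWithRetry`: it receives an already-created `Promise<T>`, and a settled promise yields the same outcome every time it is awaited, so the recursive "retry" re-observes the original failure rather than re-issuing the request. A thunk-based variant would actually retry; this is a hypothetical sketch, not part of the diff, and call sites would pass a factory such as `() => fetchNewToken({ body })`:

```ts
// Each attempt invokes the factory, producing a fresh request.
async function fetchWithRetryFn<T>(fn: () => Promise<T>, retries = 3): Promise<[Error] | [null, T]> {
  try {
    return [null, await fn()]
  }
  catch (error) {
    if (retries > 0)
      return fetchWithRetryFn(fn, retries - 1)
    return [error instanceof Error ? error : new Error('unknown error')]
  }
}
```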
"https://registry.npmjs.org/next/-/next-14.2.4.tgz" @@ -7372,11 +7370,6 @@ npm-run-path@^5.1.0: dependencies: path-key "^4.0.0" -nprogress@^0.2.0: - version "0.2.0" - resolved "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz" - integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA== - nth-check@^2.0.1: version "2.1.1" resolved "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz"