Mirror of https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
Refa: http API create dataset and test cases (#7393)
### What problem does this PR solve?

This PR introduces Pydantic-based validation for the create dataset HTTP API, improving code clarity and robustness. Key changes include:

1. Pydantic validation
2. Error handling
3. Test updates
4. Documentation

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
- [x] Documentation Update
- [x] Refactoring
This commit is contained in: parent c88e4b3fc0 · commit 78380fa181
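The heart of the change: instead of hand-rolled `if`/`return` checks scattered through the handler, each request body is validated by a single Pydantic model. A minimal sketch of the pattern (hypothetical `DatasetReq` stand-in; the real model is `CreateDatasetReq` in the new `api/utils/validation_utils.py`, shown in full below):

```python
from pydantic import BaseModel, Field, ValidationError

class DatasetReq(BaseModel):
    # Hypothetical, trimmed-down stand-in for CreateDatasetReq below.
    name: str = Field(min_length=1, max_length=128)
    pagerank: int = Field(default=0, ge=0, le=100)

def create(payload: dict) -> dict:
    try:
        req = DatasetReq(**payload)  # one call replaces many manual checks
    except ValidationError as e:
        return {"code": 101, "message": str(e)}  # ARGUMENT_ERROR-style reply
    return {"code": 0, "data": req.model_dump()}

print(create({"name": "demo"}))               # code 0
print(create({"name": "", "pagerank": 999}))  # code 101, two field errors
```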
12 .github/workflows/tests.yml (vendored)
@ -86,7 +86,7 @@ jobs:
            echo "Waiting for service to be available..."
            sleep 5
          done
-         cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_sdk_api && pytest -s --tb=short get_email.py t_dataset.py t_chat.py t_session.py t_document.py t_chunk.py
+         cd sdk/python && uv sync --python 3.10 --group test --frozen && uv pip install . && source .venv/bin/activate && cd test/test_sdk_api && pytest -s --tb=short get_email.py t_dataset.py t_chat.py t_session.py t_document.py t_chunk.py

      - name: Run frontend api tests against Elasticsearch
        run: |
@ -96,7 +96,7 @@ jobs:
            echo "Waiting for service to be available..."
            sleep 5
          done
-         cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
+         cd sdk/python && uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py

      - name: Run http api tests against Elasticsearch
        run: |
@ -106,7 +106,7 @@ jobs:
            echo "Waiting for service to be available..."
            sleep 5
          done
-         cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_http_api && pytest -s --tb=short -m "not slow"
+         cd sdk/python && uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_http_api && pytest -s --tb=short -m "not slow"

      - name: Stop ragflow:nightly
        if: always() # always run this step even if previous steps failed
@ -125,7 +125,7 @@ jobs:
            echo "Waiting for service to be available..."
            sleep 5
          done
-         cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_sdk_api && pytest -s --tb=short get_email.py t_dataset.py t_chat.py t_session.py t_document.py t_chunk.py
+         cd sdk/python && uv sync --python 3.10 --group test --frozen && uv pip install . && source .venv/bin/activate && cd test/test_sdk_api && pytest -s --tb=short get_email.py t_dataset.py t_chat.py t_session.py t_document.py t_chunk.py

      - name: Run frontend api tests against Infinity
        run: |
@ -135,7 +135,7 @@ jobs:
            echo "Waiting for service to be available..."
            sleep 5
          done
-         cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py
+         cd sdk/python && uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_frontend_api && pytest -s --tb=short get_email.py test_dataset.py

      - name: Run http api tests against Infinity
        run: |
@ -145,7 +145,7 @@ jobs:
            echo "Waiting for service to be available..."
            sleep 5
          done
-         cd sdk/python && uv sync --python 3.10 --frozen && uv pip install . && source .venv/bin/activate && cd test/test_http_api && DOC_ENGINE=infinity pytest -s --tb=short -m "not slow"
+         cd sdk/python && uv sync --python 3.10 --group test --frozen && source .venv/bin/activate && cd test/test_http_api && DOC_ENGINE=infinity pytest -s --tb=short -m "not slow"

      - name: Stop ragflow:nightly
        if: always() # always run this step even if previous steps failed
@ -14,24 +14,35 @@
  # limitations under the License.
  #

+ import logging
+
  from flask import request
- from api.db import StatusEnum, FileSource
+ from peewee import OperationalError
+ from pydantic import ValidationError
+
+ from api import settings
+ from api.db import FileSource, StatusEnum
  from api.db.db_models import File
  from api.db.services.document_service import DocumentService
  from api.db.services.file2document_service import File2DocumentService
  from api.db.services.file_service import FileService
  from api.db.services.knowledgebase_service import KnowledgebaseService
- from api.db.services.llm_service import TenantLLMService, LLMService
+ from api.db.services.llm_service import LLMService, TenantLLMService
  from api.db.services.user_service import TenantService
- from api import settings
  from api.utils import get_uuid
  from api.utils.api_utils import (
+     check_duplicate_ids,
+     dataset_readonly_fields,
+     get_error_argument_result,
+     get_error_data_result,
+     get_parser_config,
      get_result,
      token_required,
-     get_error_data_result,
      valid,
-     get_parser_config, valid_parser_config, dataset_readonly_fields,check_duplicate_ids
+     valid_parser_config,
  )
+ from api.utils.validation_utils import CreateDatasetReq, format_validation_error_message
@manager.route("/datasets", methods=["POST"])  # noqa: F821
@ -62,16 +73,28 @@ def create(tenant_id):
            name:
              type: string
              description: Name of the dataset.
+           avatar:
+             type: string
+             description: Base64 encoding of the avatar.
+           description:
+             type: string
+             description: Description of the dataset.
            embedding_model:
              type: string
              description: Embedding model Name.
            permission:
              type: string
              enum: ['me', 'team']
              description: Dataset permission.
            chunk_method:
              type: string
-             enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
-                    "presentation", "picture", "one", "email", "tag"
+             enum: ["naive", "book", "email", "laws", "manual", "one", "paper",
+                    "picture", "presentation", "qa", "table", "tag"
              ]
              description: Chunking method.
+           pagerank:
+             type: integer
+             description: Set page rank.
            parser_config:
              type: object
              description: Parser configuration.
@ -84,106 +107,87 @@ def create(tenant_id):
            data:
              type: object
      """
-     req = request.json
-     for k in req.keys():
-         if dataset_readonly_fields(k):
-             return get_result(code=settings.RetCode.ARGUMENT_ERROR, message=f"'{k}' is readonly.")
-     e, t = TenantService.get_by_id(tenant_id)
-     permission = req.get("permission")
-     chunk_method = req.get("chunk_method")
-     parser_config = req.get("parser_config")
-     valid_parser_config(parser_config)
-     valid_permission = ["me", "team"]
-     valid_chunk_method = [
-         "naive",
-         "manual",
-         "qa",
-         "table",
-         "paper",
-         "book",
-         "laws",
-         "presentation",
-         "picture",
-         "one",
-         "email",
-         "tag"
-     ]
-     check_validation = valid(
-         permission,
-         valid_permission,
-         chunk_method,
-         valid_chunk_method,
-     )
-     if check_validation:
-         return check_validation
-     req["parser_config"] = get_parser_config(chunk_method, parser_config)
-     if "tenant_id" in req:
-         return get_error_data_result(message="`tenant_id` must not be provided")
-     if "chunk_count" in req or "document_count" in req:
-         return get_error_data_result(message="`chunk_count` or `document_count` must not be provided")
-     if "name" not in req:
-         return get_error_data_result(message="`name` is not empty!")
+     req_i = request.json
+     if not isinstance(req_i, dict):
+         return get_error_argument_result(f"Invalid request payload: expected object, got {type(req_i).__name__}")
+
+     try:
+         req_v = CreateDatasetReq(**req_i)
+     except ValidationError as e:
+         return get_error_argument_result(format_validation_error_message(e))
+
+     # Field name transformations during model dump:
+     # | Original        | Dump Output |
+     # |-----------------|-------------|
+     # | embedding_model | embd_id     |
+     # | chunk_method    | parser_id   |
+     req = req_v.model_dump(by_alias=True)
+
+     try:
+         if KnowledgebaseService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
+             return get_error_argument_result(message=f"Dataset name '{req['name']}' already exists")
+     except OperationalError as e:
+         logging.exception(e)
+         return get_error_data_result(message="Database operation failed")
+
+     req["parser_config"] = get_parser_config(req["parser_id"], req["parser_config"])
      req["id"] = get_uuid()
-     req["name"] = req["name"].strip()
-     if req["name"] == "":
-         return get_error_data_result(message="`name` is not empty string!")
-     if len(req["name"]) >= 128:
-         return get_error_data_result(message="Dataset name should not be longer than 128 characters.")
-     if KnowledgebaseService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
-         return get_error_data_result(message="Duplicated dataset name in creating dataset.")
      req["tenant_id"] = tenant_id
      req["created_by"] = tenant_id
-     if not req.get("embedding_model"):
-         req["embedding_model"] = t.embd_id
+
+     try:
+         ok, t = TenantService.get_by_id(tenant_id)
+         if not ok:
+             return get_error_data_result(message="Tenant not found")
+     except OperationalError as e:
+         logging.exception(e)
+         return get_error_data_result(message="Database operation failed")
+
+     if not req.get("embd_id"):
+         req["embd_id"] = t.embd_id
      else:
-         valid_embedding_models = [
-             "BAAI/bge-large-zh-v1.5",
-             "maidalun1020/bce-embedding-base_v1",
+         builtin_embedding_models = [
+             "BAAI/bge-large-zh-v1.5@BAAI",
+             "maidalun1020/bce-embedding-base_v1@Youdao",
          ]
-         embd_model = LLMService.query(llm_name=req["embedding_model"], model_type="embedding")
-         if embd_model:
-             if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(tenant_id=tenant_id,model_type="embedding",llm_name=req.get("embedding_model"),):
-                 return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
-         if not embd_model:
-             embd_model=TenantLLMService.query(tenant_id=tenant_id,model_type="embedding", llm_name=req.get("embedding_model"))
-             if not embd_model:
-                 return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
+         is_builtin_model = req["embd_id"] in builtin_embedding_models
+         try:
+             # model name must be model_name@model_factory
+             llm_name, llm_factory = TenantLLMService.split_model_name_and_factory(req["embd_id"])
+             is_tenant_model = TenantLLMService.query(tenant_id=tenant_id, llm_name=llm_name, llm_factory=llm_factory, model_type="embedding")
+             is_supported_model = LLMService.query(llm_name=llm_name, fid=llm_factory, model_type="embedding")
+             if not (is_supported_model and (is_builtin_model or is_tenant_model)):
+                 return get_error_argument_result(f"The embedding_model '{req['embd_id']}' is not supported")
+         except OperationalError as e:
+             logging.exception(e)
+             return get_error_data_result(message="Database operation failed")
+
+     try:
+         if not KnowledgebaseService.save(**req):
+             return get_error_data_result(message="Database operation failed")
+     except OperationalError as e:
+         logging.exception(e)
+         return get_error_data_result(message="Database operation failed")
+
+     try:
+         ok, k = KnowledgebaseService.get_by_id(req["id"])
+         if not ok:
+             return get_error_data_result(message="Dataset created failed")
+     except OperationalError as e:
+         logging.exception(e)
+         return get_error_data_result(message="Database operation failed")
+
+     response_data = {}
      key_mapping = {
          "chunk_num": "chunk_count",
          "doc_num": "document_count",
          "parser_id": "chunk_method",
          "embd_id": "embedding_model",
      }
-     mapped_keys = {new_key: req[old_key] for new_key, old_key in key_mapping.items() if old_key in req}
-     req.update(mapped_keys)
-     flds = list(req.keys())
-     for f in flds:
-         if req[f] == "" and f in ["permission", "parser_id", "chunk_method"]:
-             del req[f]
-     if not KnowledgebaseService.save(**req):
-         return get_error_data_result(message="Create dataset error.(Database error)")
-     renamed_data = {}
-     e, k = KnowledgebaseService.get_by_id(req["id"])
      for key, value in k.to_dict().items():
          new_key = key_mapping.get(key, key)
-         renamed_data[new_key] = value
-     return get_result(data=renamed_data)
+         response_data[new_key] = value
+     return get_result(data=response_data)
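The alias-driven rename noted in the comment above is plain Pydantic v2 behavior; a standalone sketch trimmed to the two aliased fields of `CreateDatasetReq`:

```python
from typing import Optional

from pydantic import BaseModel, Field

class Req(BaseModel):
    # Same aliasing technique as CreateDatasetReq, trimmed to two fields.
    embedding_model: Optional[str] = Field(default=None, serialization_alias="embd_id")
    chunk_method: str = Field(default="naive", serialization_alias="parser_id")

r = Req(embedding_model="BAAI/bge-large-zh-v1.5@BAAI", chunk_method="qa")
print(r.model_dump())               # keys: embedding_model, chunk_method
print(r.model_dump(by_alias=True))  # keys: embd_id, parser_id
```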
@manager.route("/datasets", methods=["DELETE"])  # noqa: F821
@ -254,29 +258,28 @@ def delete(tenant_id):
              ]
          )
          File2DocumentService.delete_by_document_id(doc.id)
-         FileService.filter_delete(
-             [File.source_type == FileSource.KNOWLEDGEBASE, File.type == "folder", File.name == kbs[0].name])
+         FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.type == "folder", File.name == kbs[0].name])
          if not KnowledgebaseService.delete_by_id(id):
              errors.append(f"Delete dataset error for {id}")
              continue
          success_count += 1
      if errors:
          if success_count > 0:
-             return get_result(
-                 data={"success_count": success_count, "errors": errors},
-                 message=f"Partially deleted {success_count} datasets with {len(errors)} errors"
-             )
+             return get_result(data={"success_count": success_count, "errors": errors}, message=f"Partially deleted {success_count} datasets with {len(errors)} errors")
          else:
              return get_error_data_result(message="; ".join(errors))
      if duplicate_messages:
          if success_count > 0:
-             return get_result(message=f"Partially deleted {success_count} datasets with {len(duplicate_messages)} errors", data={"success_count": success_count, "errors": duplicate_messages},)
+             return get_result(
+                 message=f"Partially deleted {success_count} datasets with {len(duplicate_messages)} errors",
+                 data={"success_count": success_count, "errors": duplicate_messages},
+             )
          else:
              return get_error_data_result(message=";".join(duplicate_messages))
      return get_result(code=settings.RetCode.SUCCESS)
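Exercising the endpoint above from a client (a sketch; host, port, and API key are placeholders). Per the handler, partial failures come back as a `success_count` plus an `errors` list:

```python
import requests

# Placeholder deployment values; substitute your own.
BASE = "http://127.0.0.1:9380/api/v1"
HEADERS = {"Authorization": "Bearer <YOUR_API_KEY>"}

# Deletes the listed datasets; duplicate ids in the list are reported
# back via the duplicate_messages branch of the handler above.
res = requests.delete(f"{BASE}/datasets", json={"ids": ["<dataset_id>"]}, headers=HEADERS)
print(res.json())  # e.g. {"code": 0} or {"code": 0, "data": {"success_count": 1, "errors": [...]}}
```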
@manager.route("/datasets/<dataset_id>", methods=["PUT"])  # noqa: F821
@token_required
def update(tenant_id, dataset_id):
    """
@ -333,7 +336,7 @@ def update(tenant_id, dataset_id):
          if dataset_readonly_fields(k):
              return get_result(code=settings.RetCode.ARGUMENT_ERROR, message=f"'{k}' is readonly.")
      e, t = TenantService.get_by_id(tenant_id)
-     invalid_keys = {"id", "embd_id", "chunk_num", "doc_num", "parser_id", "create_date", "create_time", "created_by", "status","token_num","update_date","update_time"}
+     invalid_keys = {"id", "embd_id", "chunk_num", "doc_num", "parser_id", "create_date", "create_time", "created_by", "status", "token_num", "update_date", "update_time"}
      if any(key in req for key in invalid_keys):
          return get_error_data_result(message="The input parameters are invalid.")
      permission = req.get("permission")
@ -341,20 +344,7 @@ def update(tenant_id, dataset_id):
      parser_config = req.get("parser_config")
      valid_parser_config(parser_config)
      valid_permission = ["me", "team"]
-     valid_chunk_method = [
-         "naive",
-         "manual",
-         "qa",
-         "table",
-         "paper",
-         "book",
-         "laws",
-         "presentation",
-         "picture",
-         "one",
-         "email",
-         "tag"
-     ]
+     valid_chunk_method = ["naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one", "email", "tag"]
      check_validation = valid(
          permission,
          valid_permission,
@ -381,18 +371,14 @@ def update(tenant_id, dataset_id):
          req.pop("document_count")
      if req.get("chunk_method"):
          if kb.chunk_num != 0 and req["chunk_method"] != kb.parser_id:
-             return get_error_data_result(
-                 message="If `chunk_count` is not 0, `chunk_method` is not changeable."
-             )
+             return get_error_data_result(message="If `chunk_count` is not 0, `chunk_method` is not changeable.")
          req["parser_id"] = req.pop("chunk_method")
          if req["parser_id"] != kb.parser_id:
              if not req.get("parser_config"):
                  req["parser_config"] = get_parser_config(chunk_method, parser_config)
      if "embedding_model" in req:
          if kb.chunk_num != 0 and req["embedding_model"] != kb.embd_id:
-             return get_error_data_result(
-                 message="If `chunk_count` is not 0, `embedding_model` is not changeable."
-             )
+             return get_error_data_result(message="If `chunk_count` is not 0, `embedding_model` is not changeable.")
          if not req.get("embedding_model"):
              return get_error_data_result("`embedding_model` can't be empty")
          valid_embedding_models = [
@ -409,38 +395,26 @@ def update(tenant_id, dataset_id):
              "text-embedding-v3",
              "maidalun1020/bce-embedding-base_v1",
          ]
-         embd_model = LLMService.query(
-             llm_name=req["embedding_model"], model_type="embedding"
-         )
+         embd_model = LLMService.query(llm_name=req["embedding_model"], model_type="embedding")
          if embd_model:
-             if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(tenant_id=tenant_id,model_type="embedding",llm_name=req.get("embedding_model"),):
+             if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(
+                 tenant_id=tenant_id,
+                 model_type="embedding",
+                 llm_name=req.get("embedding_model"),
+             ):
                  return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
          if not embd_model:
-             embd_model=TenantLLMService.query(tenant_id=tenant_id,model_type="embedding", llm_name=req.get("embedding_model"))
+             embd_model = TenantLLMService.query(tenant_id=tenant_id, model_type="embedding", llm_name=req.get("embedding_model"))
+
              if not embd_model:
-                 return get_error_data_result(
-                     f"`embedding_model` {req.get('embedding_model')} doesn't exist"
-                 )
+                 return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
          req["embd_id"] = req.pop("embedding_model")
      if "name" in req:
          req["name"] = req["name"].strip()
          if len(req["name"]) >= 128:
-             return get_error_data_result(
-                 message="Dataset name should not be longer than 128 characters."
-             )
-         if (
-             req["name"].lower() != kb.name.lower()
-             and len(
-                 KnowledgebaseService.query(
-                     name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
-                 )
-             )
-             > 0
-         ):
-             return get_error_data_result(
-                 message="Duplicated dataset name in updating dataset."
-             )
+             return get_error_data_result(message="Dataset name should not be longer than 128 characters.")
+         if req["name"].lower() != kb.name.lower() and len(KnowledgebaseService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value)) > 0:
+             return get_error_data_result(message="Duplicated dataset name in updating dataset.")
      flds = list(req.keys())
      for f in flds:
          if req[f] == "" and f in ["permission", "parser_id", "chunk_method"]:
@ -511,11 +485,11 @@ def list_datasets(tenant_id):
      id = request.args.get("id")
      name = request.args.get("name")
      if id:
-         kbs = KnowledgebaseService.get_kb_by_id(id,tenant_id)
+         kbs = KnowledgebaseService.get_kb_by_id(id, tenant_id)
          if not kbs:
              return get_error_data_result(f"You don't own the dataset {id}")
      if name:
-         kbs = KnowledgebaseService.get_kb_by_name(name,tenant_id)
+         kbs = KnowledgebaseService.get_kb_by_name(name, tenant_id)
          if not kbs:
              return get_error_data_result(f"You don't own the dataset {name}")
      page_number = int(request.args.get("page", 1))
@ -322,6 +322,10 @@ def get_error_data_result(
      return jsonify(response)


+ def get_error_argument_result(message="Invalid arguments"):
+     return get_result(code=settings.RetCode.ARGUMENT_ERROR, message=message)
+
+
  def generate_confirmation_token(tenant_id):
      serializer = URLSafeTimedSerializer(tenant_id)
      return "ragflow-" + serializer.dumps(get_uuid(), salt=tenant_id)[2:34]
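The new helper gives validation failures their own return code, distinct from `get_error_data_result`. A self-contained sketch of the behavior (the `101` value for `ARGUMENT_ERROR` is inferred from the failure example in the updated docs below; treat it as an assumption):

```python
from enum import IntEnum

class RetCode(IntEnum):
    SUCCESS = 0
    ARGUMENT_ERROR = 101  # assumed value, matching the docs' failure example

def get_result(code=RetCode.SUCCESS, message="", data=None):
    # Simplified stand-in for api.utils.api_utils.get_result (no Flask jsonify).
    resp = {"code": int(code), "message": message}
    if data is not None:
        resp["data"] = data
    return resp

def get_error_argument_result(message="Invalid arguments"):
    return get_result(code=RetCode.ARGUMENT_ERROR, message=message)

print(get_error_argument_result("Field: <name> - Message: <String should have at least 1 character> - Value: <>"))
```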
@ -368,46 +372,34 @@ def get_parser_config(chunk_method, parser_config):
      return parser_config


- def get_data_openai(id=None,
-                     created=None,
-                     model=None,
-                     prompt_tokens= 0,
-                     completion_tokens=0,
-                     content = None,
-                     finish_reason= None,
-                     object="chat.completion",
-                     param=None,
+ def get_data_openai(
+     id=None,
+     created=None,
+     model=None,
+     prompt_tokens=0,
+     completion_tokens=0,
+     content=None,
+     finish_reason=None,
+     object="chat.completion",
+     param=None,
  ):
-     total_tokens= prompt_tokens + completion_tokens
+     total_tokens = prompt_tokens + completion_tokens
      return {
-         "id":f"{id}",
+         "id": f"{id}",
          "object": object,
          "created": int(time.time()) if created else None,
          "model": model,
-         "param":param,
+         "param": param,
          "usage": {
              "prompt_tokens": prompt_tokens,
              "completion_tokens": completion_tokens,
              "total_tokens": total_tokens,
-             "completion_tokens_details": {
-                 "reasoning_tokens": 0,
-                 "accepted_prediction_tokens": 0,
-                 "rejected_prediction_tokens": 0
-             }
+             "completion_tokens_details": {"reasoning_tokens": 0, "accepted_prediction_tokens": 0, "rejected_prediction_tokens": 0},
          },
-         "choices": [
-             {
-                 "message": {
-                     "role": "assistant",
-                     "content": content
-                 },
-                 "logprobs": None,
-                 "finish_reason": finish_reason,
-                 "index": 0
-             }
-         ]
-     }
+         "choices": [{"message": {"role": "assistant", "content": content}, "logprobs": None, "finish_reason": finish_reason, "index": 0}],
+     }


  def valid_parser_config(parser_config):
      if not parser_config:
          return
162 api/utils/validation_utils.py (new file)
@ -0,0 +1,162 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from enum import auto
from typing import Annotated, List, Optional

from pydantic import BaseModel, Field, StringConstraints, ValidationError, field_validator
from strenum import StrEnum


def format_validation_error_message(e: ValidationError):
    error_messages = []

    for error in e.errors():
        field = ".".join(map(str, error["loc"]))
        msg = error["msg"]
        input_val = error["input"]
        input_str = str(input_val)

        if len(input_str) > 128:
            input_str = input_str[:125] + "..."

        error_msg = f"Field: <{field}> - Message: <{msg}> - Value: <{input_str}>"
        error_messages.append(error_msg)

    return "\n".join(error_messages)


class PermissionEnum(StrEnum):
    me = auto()
    team = auto()


class ChunkMethodnEnum(StrEnum):
    naive = auto()
    book = auto()
    email = auto()
    laws = auto()
    manual = auto()
    one = auto()
    paper = auto()
    picture = auto()
    presentation = auto()
    qa = auto()
    table = auto()
    tag = auto()


class GraphragMethodEnum(StrEnum):
    light = auto()
    general = auto()


class Base(BaseModel):
    class Config:
        extra = "forbid"
        json_schema_extra = {"charset": "utf8mb4", "collation": "utf8mb4_0900_ai_ci"}


class RaptorConfig(Base):
    use_raptor: bool = Field(default=False)
    prompt: Annotated[
        str,
        StringConstraints(strip_whitespace=True, min_length=1),
        Field(
            default="Please summarize the following paragraphs. Be careful with the numbers, do not make things up. Paragraphs as following:\n {cluster_content}\nThe above is the content you need to summarize."
        ),
    ]
    max_token: int = Field(default=256, ge=1, le=2048)
    threshold: float = Field(default=0.1, ge=0.0, le=1.0)
    max_cluster: int = Field(default=64, ge=1, le=1024)
    random_seed: int = Field(default=0, ge=0, le=10_000)


class GraphragConfig(Base):
    use_graphrag: bool = Field(default=False)
    entity_types: List[str] = Field(default_factory=lambda: ["organization", "person", "geo", "event", "category"])
    method: GraphragMethodEnum = Field(default=GraphragMethodEnum.light)
    community: bool = Field(default=False)
    resolution: bool = Field(default=False)


class ParserConfig(Base):
    auto_keywords: int = Field(default=0, ge=0, le=32)
    auto_questions: int = Field(default=0, ge=0, le=10)
    chunk_token_num: int = Field(default=128, ge=1, le=2048)
    delimiter: str = Field(default=r"\n!?;。;!?", min_length=1)
    graphrag: Optional[GraphragConfig] = None
    html4excel: bool = False
    layout_recognize: str = "DeepDOC"
    raptor: Optional[RaptorConfig] = None
    tag_kb_ids: List[str] = Field(default_factory=list)
    topn_tags: int = Field(default=1, ge=1, le=10)
    filename_embd_weight: Optional[float] = Field(default=None, ge=0.0, le=1.0)
    task_page_size: Optional[int] = Field(default=None, ge=1, le=10_000)
    pages: Optional[List[List[int]]] = None


class CreateDatasetReq(Base):
    name: Annotated[str, StringConstraints(strip_whitespace=True, min_length=1, max_length=128), Field(...)]
    avatar: Optional[str] = Field(default=None, max_length=65535)
    description: Optional[str] = Field(default=None, max_length=65535)
    embedding_model: Annotated[Optional[str], StringConstraints(strip_whitespace=True, max_length=255), Field(default=None, serialization_alias="embd_id")]
    permission: Annotated[PermissionEnum, StringConstraints(strip_whitespace=True, min_length=1, max_length=16), Field(default=PermissionEnum.me)]
    chunk_method: Annotated[ChunkMethodnEnum, StringConstraints(strip_whitespace=True, min_length=1, max_length=32), Field(default=ChunkMethodnEnum.naive, serialization_alias="parser_id")]
    pagerank: int = Field(default=0, ge=0, le=100)
    parser_config: Optional[ParserConfig] = Field(default=None)

    @field_validator("avatar")
    @classmethod
    def validate_avatar_base64(cls, v: str) -> str:
        if v is None:
            return v

        if "," in v:
            prefix, _ = v.split(",", 1)
            if not prefix.startswith("data:"):
                raise ValueError("Invalid MIME prefix format. Must start with 'data:'")

            mime_type = prefix[5:].split(";")[0]
            supported_mime_types = ["image/jpeg", "image/png"]
            if mime_type not in supported_mime_types:
                raise ValueError(f"Unsupported MIME type. Allowed: {supported_mime_types}")

            return v
        else:
            raise ValueError("Missing MIME prefix. Expected format: data:<mime>;base64,<data>")

    @field_validator("embedding_model", mode="after")
    @classmethod
    def validate_embedding_model(cls, v: str) -> str:
        if "@" not in v:
            raise ValueError("Embedding model must be xxx@yyy")
        return v

    @field_validator("permission", mode="before")
    @classmethod
    def permission_auto_lowercase(cls, v: str) -> str:
        if isinstance(v, str):
            return v.lower()
        return v

    @field_validator("parser_config", mode="after")
    @classmethod
    def validate_parser_config_json_length(cls, v: Optional[ParserConfig]) -> Optional[ParserConfig]:
        if v is not None:
            json_str = v.model_dump_json()
            if len(json_str) > 65535:
                raise ValueError("Parser config have at most 65535 characters")
        return v
@ -341,6 +341,7 @@ Creates a dataset.
  - `"embedding_model"`: `string`
  - `"permission"`: `string`
  - `"chunk_method"`: `string`
+ - `"pagerank"`: `int`
  - `"parser_config"`: `object`

##### Request example

@ -359,53 +360,83 @@ curl --request POST \

  - `"name"`: (*Body parameter*), `string`, *Required*
    The unique name of the dataset to create. It must adhere to the following requirements:
-   - Permitted characters include:
-     - English letters (a-z, A-Z)
-     - Digits (0-9)
-     - "_" (underscore)
-   - Must begin with an English letter or underscore.
-   - Maximum 65,535 characters.
-   - Case-insensitive.
+   - Basic Multilingual Plane (BMP) only
+   - Maximum 128 characters
+   - Case-insensitive

  - `"avatar"`: (*Body parameter*), `string`
    Base64 encoding of the avatar.
+   - Maximum 65535 characters

  - `"description"`: (*Body parameter*), `string`
    A brief description of the dataset to create.
+   - Maximum 65535 characters

  - `"embedding_model"`: (*Body parameter*), `string`
-   The name of the embedding model to use. For example: `"BAAI/bge-zh-v1.5"`
+   The name of the embedding model to use. For example: `"BAAI/bge-large-zh-v1.5@BAAI"`
+   - Maximum 255 characters
+   - Must follow `model_name@model_factory` format

  - `"permission"`: (*Body parameter*), `string`
    Specifies who can access the dataset to create. Available options:
    - `"me"`: (Default) Only you can manage the dataset.
    - `"team"`: All team members can manage the dataset.

+ - `"pagerank"`: (*Body parameter*), `int`
+   Set page rank: refer to [Set page rank](https://ragflow.io/docs/dev/set_page_rank)
+   - Default: `0`
+   - Minimum: `0`
+   - Maximum: `100`

  - `"chunk_method"`: (*Body parameter*), `enum<string>`
    The chunking method of the dataset to create. Available options:
    - `"naive"`: General (default)
+   - `"book"`: Book
+   - `"email"`: Email
+   - `"laws"`: Laws
    - `"manual"`: Manual
+   - `"one"`: One
+   - `"paper"`: Paper
+   - `"picture"`: Picture
+   - `"presentation"`: Presentation
    - `"qa"`: Q&A
    - `"table"`: Table
-   - `"paper"`: Paper
-   - `"book"`: Book
-   - `"laws"`: Laws
-   - `"presentation"`: Presentation
-   - `"picture"`: Picture
-   - `"one"`: One
-   - `"email"`: Email
    - `"tag"`: Tag

  - `"parser_config"`: (*Body parameter*), `object`
    The configuration settings for the dataset parser. The attributes in this JSON object vary with the selected `"chunk_method"`:
    - If `"chunk_method"` is `"naive"`, the `"parser_config"` object contains the following attributes:
-     - `"chunk_token_count"`: Defaults to `128`.
-     - `"layout_recognize"`: Defaults to `true`.
-     - `"html4excel"`: Indicates whether to convert Excel documents into HTML format. Defaults to `false`.
-     - `"delimiter"`: Defaults to `"\n"`.
-     - `"task_page_size"`: Defaults to `12`. For PDF only.
-     - `"raptor"`: RAPTOR-specific settings. Defaults to: `{"use_raptor": false}`.
+     - `"auto_keywords"`: `int`
+       - Defaults to `0`
+       - Minimum: `0`
+       - Maximum: `32`
+     - `"auto_questions"`: `int`
+       - Defaults to `0`
+       - Minimum: `0`
+       - Maximum: `10`
+     - `"chunk_token_num"`: `int`
+       - Defaults to `128`
+       - Minimum: `1`
+       - Maximum: `2048`
+     - `"delimiter"`: `string`
+       - Defaults to `"\n"`.
+     - `"html4excel"`: `bool` Indicates whether to convert Excel documents into HTML format.
+       - Defaults to `false`
+     - `"layout_recognize"`: `string`
+       - Defaults to `DeepDOC`
+     - `"tag_kb_ids"`: `array<string>` refer to [Use tag set](https://ragflow.io/docs/dev/use_tag_sets)
+       - Must include a list of dataset IDs, where each dataset is parsed using the Tag Chunk Method
+     - `"task_page_size"`: `int` For PDF only.
+       - Defaults to `12`
+       - Minimum: `1`
+       - Maximum: `10000`
+     - `"raptor"`: `object` RAPTOR-specific settings.
+       - Defaults to: `{"use_raptor": false}`
+     - `"graphrag"`: `object` GRAPHRAG-specific settings.
+       - Defaults to: `{"use_graphrag": false}`
    - If `"chunk_method"` is `"qa"`, `"manual"`, `"paper"`, `"book"`, `"laws"`, or `"presentation"`, the `"parser_config"` object contains the following attribute:
-     - `"raptor"`: RAPTOR-specific settings. Defaults to: `{"use_raptor": false}`.
+     - `"raptor"`: `object` RAPTOR-specific settings.
+       - Defaults to: `{"use_raptor": false}`.
    - If `"chunk_method"` is `"table"`, `"picture"`, `"one"`, or `"email"`, `"parser_config"` is an empty JSON object.
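A Python equivalent of the curl request above (a sketch; the address and API key are placeholders, and the field values follow the constraints just listed):

```python
import requests

url = "http://<address>/api/v1/datasets"  # placeholder address
headers = {"Authorization": "Bearer <YOUR_API_KEY>"}

payload = {
    "name": "RAGFlow example",
    "embedding_model": "BAAI/bge-large-zh-v1.5@BAAI",  # model_name@model_factory
    "chunk_method": "naive",
    "pagerank": 0,
    "parser_config": {"chunk_token_num": 128, "layout_recognize": "DeepDOC"},
}

res = requests.post(url, json=payload, headers=headers).json()
print(res["code"], res.get("message", ""))  # 0 on success, 101 on argument errors
```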
#### Response

@ -419,33 +450,34 @@ Success:
      "avatar": null,
      "chunk_count": 0,
      "chunk_method": "naive",
-     "create_date": "Thu, 24 Oct 2024 09:14:07 GMT",
-     "create_time": 1729761247434,
-     "created_by": "69736c5e723611efb51b0242ac120007",
+     "create_date": "Mon, 28 Apr 2025 18:40:41 GMT",
+     "create_time": 1745836841611,
+     "created_by": "3af81804241d11f0a6a79f24fc270c7f",
      "description": null,
      "document_count": 0,
-     "embedding_model": "BAAI/bge-large-zh-v1.5",
-     "id": "527fa74891e811ef9c650242ac120006",
+     "embedding_model": "BAAI/bge-large-zh-v1.5@BAAI",
+     "id": "3b4de7d4241d11f0a6a79f24fc270c7f",
      "language": "English",
-     "name": "test_1",
+     "name": "RAGFlow example",
+     "pagerank": 0,
      "parser_config": {
          "chunk_token_num": 128,
-         "delimiter": "\\n",
+         "delimiter": "\\n!?;。;!?",
          "html4excel": false,
-         "layout_recognize": true,
+         "layout_recognize": "DeepDOC",
          "raptor": {
              "use_raptor": false
          }
      },
      "permission": "me",
      "similarity_threshold": 0.2,
      "status": "1",
-     "tenant_id": "69736c5e723611efb51b0242ac120007",
+     "tenant_id": "3af81804241d11f0a6a79f24fc270c7f",
      "token_num": 0,
-     "update_date": "Thu, 24 Oct 2024 09:14:07 GMT",
-     "update_time": 1729761247434,
+     "update_date": "Mon, 28 Apr 2025 18:40:41 GMT",
+     "update_time": 1745836841611,
      "vector_similarity_weight": 0.3
  }
```

@ -453,8 +485,8 @@ Failure:

```json
{
-     "code": 102,
-     "message": "Duplicated knowledgebase name in creating dataset."
+     "code": 101,
+     "message": "Dataset name 'RAGFlow example' already exists"
}
```
@ -95,11 +95,12 @@ else:

```python
RAGFlow.create_dataset(
    name: str,
-   avatar: str = "",
-   description: str = "",
-   embedding_model: str = "BAAI/bge-large-zh-v1.5",
+   avatar: Optional[str] = None,
+   description: Optional[str] = None,
+   embedding_model: Optional[str] = "BAAI/bge-large-zh-v1.5@BAAI",
    permission: str = "me",
    chunk_method: str = "naive",
+   pagerank: int = 0,
    parser_config: DataSet.ParserConfig = None
) -> DataSet
```
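Typical use of the updated signature (a sketch; server address and key are placeholders):

```python
from ragflow_sdk import RAGFlow

rag = RAGFlow(api_key="<YOUR_API_KEY>", base_url="http://<address>")

ds = rag.create_dataset(
    name="kb_demo",
    embedding_model="BAAI/bge-large-zh-v1.5@BAAI",  # model_name@model_factory
    chunk_method="naive",
    pagerank=0,
)
print(ds.name)
```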
@ -112,16 +113,16 @@ Creates a dataset.

  The unique name of the dataset to create. It must adhere to the following requirements:

- - Maximum 65,535 characters.
+ - Maximum 128 characters.
  - Case-insensitive.

##### avatar: `str`

- Base64 encoding of the avatar. Defaults to `""`
+ Base64 encoding of the avatar. Defaults to `None`.

##### description: `str`

- A brief description of the dataset to create. Defaults to `""`.
+ A brief description of the dataset to create. Defaults to `None`.

##### permission

@ -147,6 +148,10 @@ The chunking method of the dataset to create. Available options:

- `"one"`: One
- `"email"`: Email

+ ##### pagerank: `int`
+
+ The pagerank of the dataset to create. Defaults to `0`.

##### parser_config

The parser configuration of the dataset. A `ParserConfig` object's attributes vary based on the selected `chunk_method`:
@ -2,31 +2,26 @@
  name = "ragflow-sdk"
  version = "0.18.0"
  description = "Python client sdk of [RAGFlow](https://github.com/infiniflow/ragflow). RAGFlow is an open-source RAG (Retrieval-Augmented Generation) engine based on deep document understanding."
- authors = [
-     { name = "Zhichang Yu", email = "yuzhichang@gmail.com" }
- ]
+ authors = [{ name = "Zhichang Yu", email = "yuzhichang@gmail.com" }]
  license = { text = "Apache License, Version 2.0" }
  readme = "README.md"
  requires-python = ">=3.10,<3.13"
- dependencies = [
-     "requests>=2.30.0,<3.0.0",
-     "beartype>=0.18.5,<0.19.0",
-     "pytest>=8.0.0,<9.0.0",
-     "requests-toolbelt>=1.0.0",
-     "python-docx>=1.1.2",
- ]
+ dependencies = ["requests>=2.30.0,<3.0.0", "beartype>=0.18.5,<0.19.0"]

+ [dependency-groups]
+ test = [
+     "hypothesis>=6.131.9",
+     "openpyxl>=3.1.5",
+     "pillow>=11.1.0",
+     "pytest>=8.3.5",
+     "python-docx>=1.1.2",
+     "python-pptx>=1.0.2",
+     "reportlab>=4.3.1",
+     "requests>=2.32.3",
+     "requests-toolbelt>=1.0.0",
+ ]

- [project.optional-dependencies]
- test = [
-     "pytest>=8.0.0,<9.0.0"
- ]

  [tool.pytest.ini_options]
- markers = [
-     "slow: marks tests as slow (deselect with '-m \"not slow\"')",
-     "wip: marks tests as work in progress (deselect with '-m \"not wip\"')"
- ]
+ markers = ["slow: marks tests as slow (deselect with '-m \"not slow\"')"]
@ -13,16 +13,18 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ from typing import Optional
+
  import requests

+ from .modules.agent import Agent
  from .modules.chat import Chat
  from .modules.chunk import Chunk
  from .modules.dataset import DataSet
- from .modules.agent import Agent


  class RAGFlow:
-     def __init__(self, api_key, base_url, version='v1'):
+     def __init__(self, api_key, base_url, version="v1"):
          """
          api_url: http://<host_address>/api/v1
          """
@ -31,11 +33,11 @@ class RAGFlow:
          self.authorization_header = {"Authorization": "{} {}".format("Bearer", self.user_key)}

      def post(self, path, json=None, stream=False, files=None):
-         res = requests.post(url=self.api_url + path, json=json, headers=self.authorization_header, stream=stream,files=files)
+         res = requests.post(url=self.api_url + path, json=json, headers=self.authorization_header, stream=stream, files=files)
          return res

      def get(self, path, params=None, json=None):
-         res = requests.get(url=self.api_url + path, params=params, headers=self.authorization_header,json=json)
+         res = requests.get(url=self.api_url + path, params=params, headers=self.authorization_header, json=json)
          return res

      def delete(self, path, json):
@ -43,54 +45,73 @@ class RAGFlow:
          return res

      def put(self, path, json):
-         res = requests.put(url=self.api_url + path, json= json,headers=self.authorization_header)
+         res = requests.put(url=self.api_url + path, json=json, headers=self.authorization_header)
          return res

-     def create_dataset(self, name: str, avatar: str = "", description: str = "", embedding_model:str = "BAAI/bge-large-zh-v1.5",
-                        language: str = "English",
-                        permission: str = "me",chunk_method: str = "naive",
-                        parser_config: DataSet.ParserConfig = None) -> DataSet:
+     def create_dataset(
+         self,
+         name: str,
+         avatar: Optional[str] = None,
+         description: Optional[str] = None,
+         embedding_model: Optional[str] = "BAAI/bge-large-zh-v1.5@BAAI",
+         permission: str = "me",
+         chunk_method: str = "naive",
+         pagerank: int = 0,
+         parser_config: DataSet.ParserConfig = None,
+     ) -> DataSet:
          if parser_config:
              parser_config = parser_config.to_json()
-         res = self.post("/datasets",
-                         {"name": name, "avatar": avatar, "description": description,"embedding_model":embedding_model,
-                          "language": language,
-                          "permission": permission, "chunk_method": chunk_method,
-                          "parser_config": parser_config
-                          }
-                         )
+         res = self.post(
+             "/datasets",
+             {
+                 "name": name,
+                 "avatar": avatar,
+                 "description": description,
+                 "embedding_model": embedding_model,
+                 "permission": permission,
+                 "chunk_method": chunk_method,
+                 "pagerank": pagerank,
+                 "parser_config": parser_config,
+             },
+         )
          res = res.json()
          if res.get("code") == 0:
              return DataSet(self, res["data"])
          raise Exception(res["message"])

      def delete_datasets(self, ids: list[str] | None = None):
-         res = self.delete("/datasets",{"ids": ids})
-         res=res.json()
+         res = self.delete("/datasets", {"ids": ids})
+         res = res.json()
          if res.get("code") != 0:
              raise Exception(res["message"])

-     def get_dataset(self,name: str):
+     def get_dataset(self, name: str):
          _list = self.list_datasets(name=name)
          if len(_list) > 0:
              return _list[0]
          raise Exception("Dataset %s not found" % name)

-     def list_datasets(self, page: int = 1, page_size: int = 30, orderby: str = "create_time", desc: bool = True,
-                       id: str | None = None, name: str | None = None) -> \
-             list[DataSet]:
-         res = self.get("/datasets",
-                        {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name})
+     def list_datasets(self, page: int = 1, page_size: int = 30, orderby: str = "create_time", desc: bool = True, id: str | None = None, name: str | None = None) -> list[DataSet]:
+         res = self.get(
+             "/datasets",
+             {
+                 "page": page,
+                 "page_size": page_size,
+                 "orderby": orderby,
+                 "desc": desc,
+                 "id": id,
+                 "name": name,
+             },
+         )
          res = res.json()
          result_list = []
          if res.get("code") == 0:
-             for data in res['data']:
+             for data in res["data"]:
                  result_list.append(DataSet(self, data))
              return result_list
          raise Exception(res["message"])

-     def create_chat(self, name: str, avatar: str = "", dataset_ids=None,
-                     llm: Chat.LLM | None = None, prompt: Chat.Prompt | None = None) -> Chat:
+     def create_chat(self, name: str, avatar: str = "", dataset_ids=None, llm: Chat.LLM | None = None, prompt: Chat.Prompt | None = None) -> Chat:
          if dataset_ids is None:
              dataset_ids = []
          dataset_list = []
@ -98,25 +119,33 @@ class RAGFlow:
              dataset_list.append(id)

          if llm is None:
-             llm = Chat.LLM(self, {"model_name": None,
-                                   "temperature": 0.1,
-                                   "top_p": 0.3,
-                                   "presence_penalty": 0.4,
-                                   "frequency_penalty": 0.7,
-                                   "max_tokens": 512, })
+             llm = Chat.LLM(
+                 self,
+                 {
+                     "model_name": None,
+                     "temperature": 0.1,
+                     "top_p": 0.3,
+                     "presence_penalty": 0.4,
+                     "frequency_penalty": 0.7,
+                     "max_tokens": 512,
+                 },
+             )
          if prompt is None:
-             prompt = Chat.Prompt(self, {"similarity_threshold": 0.2,
-                                         "keywords_similarity_weight": 0.7,
-                                         "top_n": 8,
-                                         "top_k": 1024,
-                                         "variables": [{
-                                             "key": "knowledge",
-                                             "optional": True
-                                         }], "rerank_model": "",
-                                         "empty_response": None,
-                                         "opener": None,
-                                         "show_quote": True,
-                                         "prompt": None})
+             prompt = Chat.Prompt(
+                 self,
+                 {
+                     "similarity_threshold": 0.2,
+                     "keywords_similarity_weight": 0.7,
+                     "top_n": 8,
+                     "top_k": 1024,
+                     "variables": [{"key": "knowledge", "optional": True}],
+                     "rerank_model": "",
+                     "empty_response": None,
+                     "opener": None,
+                     "show_quote": True,
+                     "prompt": None,
+                 },
+             )
          if prompt.opener is None:
              prompt.opener = "Hi! I'm your assistant, what can I do for you?"
          if prompt.prompt is None:
@ -127,70 +156,93 @@ class RAGFlow:
                  "Answers need to consider chat history.\nHere is the knowledge base:\n{knowledge}\nThe above is the knowledge base."
              )

-         temp_dict = {"name": name,
-                      "avatar": avatar,
-                      "dataset_ids": dataset_list if dataset_list else [],
-                      "llm": llm.to_json(),
-                      "prompt": prompt.to_json()}
+         temp_dict = {"name": name, "avatar": avatar, "dataset_ids": dataset_list if dataset_list else [], "llm": llm.to_json(), "prompt": prompt.to_json()}
          res = self.post("/chats", temp_dict)
          res = res.json()
          if res.get("code") == 0:
              return Chat(self, res["data"])
          raise Exception(res["message"])

-     def delete_chats(self,ids: list[str] | None = None):
-         res = self.delete('/chats',
-                           {"ids":ids})
+     def delete_chats(self, ids: list[str] | None = None):
+         res = self.delete("/chats", {"ids": ids})
          res = res.json()
          if res.get("code") != 0:
              raise Exception(res["message"])

-     def list_chats(self, page: int = 1, page_size: int = 30, orderby: str = "create_time", desc: bool = True,
-                    id: str | None = None, name: str | None = None) -> list[Chat]:
-         res = self.get("/chats",{"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "name": name})
+     def list_chats(self, page: int = 1, page_size: int = 30, orderby: str = "create_time", desc: bool = True, id: str | None = None, name: str | None = None) -> list[Chat]:
+         res = self.get(
+             "/chats",
+             {
+                 "page": page,
+                 "page_size": page_size,
+                 "orderby": orderby,
+                 "desc": desc,
+                 "id": id,
+                 "name": name,
+             },
+         )
          res = res.json()
          result_list = []
          if res.get("code") == 0:
-             for data in res['data']:
+             for data in res["data"]:
                  result_list.append(Chat(self, data))
              return result_list
          raise Exception(res["message"])

-     def retrieve(self, dataset_ids, document_ids=None, question="", page=1, page_size=30, similarity_threshold=0.2, vector_similarity_weight=0.3, top_k=1024, rerank_id: str | None = None, keyword:bool=False, ):
+     def retrieve(
+         self,
+         dataset_ids,
+         document_ids=None,
+         question="",
+         page=1,
+         page_size=30,
+         similarity_threshold=0.2,
+         vector_similarity_weight=0.3,
+         top_k=1024,
+         rerank_id: str | None = None,
+         keyword: bool = False,
+     ):
          if document_ids is None:
              document_ids = []
-         data_json ={
+         data_json = {
              "page": page,
              "page_size": page_size,
              "similarity_threshold": similarity_threshold,
              "vector_similarity_weight": vector_similarity_weight,
              "top_k": top_k,
              "rerank_id": rerank_id,
              "keyword": keyword,
              "question": question,
              "dataset_ids": dataset_ids,
-             "document_ids": document_ids
+             "document_ids": document_ids,
          }
          # Send a POST request to the backend service (using requests library as an example, actual implementation may vary)
-         res = self.post('/retrieval',json=data_json)
+         res = self.post("/retrieval", json=data_json)
          res = res.json()
-         if res.get("code") ==0:
-             chunks=[]
+         if res.get("code") == 0:
+             chunks = []
              for chunk_data in res["data"].get("chunks"):
-                 chunk=Chunk(self,chunk_data)
+                 chunk = Chunk(self, chunk_data)
                  chunks.append(chunk)
              return chunks
          raise Exception(res.get("message"))

-     def list_agents(self, page: int = 1, page_size: int = 30, orderby: str = "update_time", desc: bool = True,
-                     id: str | None = None, title: str | None = None) -> list[Agent]:
-         res = self.get("/agents",{"page": page, "page_size": page_size, "orderby": orderby, "desc": desc, "id": id, "title": title})
+     def list_agents(self, page: int = 1, page_size: int = 30, orderby: str = "update_time", desc: bool = True, id: str | None = None, title: str | None = None) -> list[Agent]:
+         res = self.get(
+             "/agents",
+             {
+                 "page": page,
+                 "page_size": page_size,
+                 "orderby": orderby,
+                 "desc": desc,
+                 "id": id,
+                 "title": title,
+             },
+         )
          res = res.json()
          result_list = []
          if res.get("code") == 0:
-             for data in res['data']:
+             for data in res["data"]:
                  result_list.append(Agent(self, data))
              return result_list
          raise Exception(res["message"])
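End to end, the reformatted client methods compose as before (a sketch; assumes a reachable server, a valid key, and at least one parsed dataset):

```python
from ragflow_sdk import RAGFlow

rag = RAGFlow(api_key="<YOUR_API_KEY>", base_url="http://<address>")

datasets = rag.list_datasets(page=1, page_size=10)
if datasets:
    chunks = rag.retrieve(
        dataset_ids=[datasets[0].id],
        question="What is RAGFlow?",
        similarity_threshold=0.2,
        top_k=1024,
    )
    for chunk in chunks:
        print(chunk.content)
```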
@ -14,13 +14,26 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
import hypothesis.strategies as st
|
||||
import pytest
|
||||
from common import DATASET_NAME_LIMIT, INVALID_API_TOKEN, create_dataset
|
||||
from hypothesis import example, given, settings
|
||||
from libs.auth import RAGFlowHttpApiAuth
|
||||
from libs.utils import encode_avatar
|
||||
from libs.utils.file_utils import create_image_file
|
||||
|
||||
|
||||
@st.composite
|
||||
def valid_names(draw):
|
||||
base_chars = "abcdefghijklmnopqrstuvwxyz_"
|
||||
first_char = draw(st.sampled_from([c for c in base_chars if c.isalpha() or c == "_"]))
|
||||
remaining = draw(st.text(alphabet=st.sampled_from(base_chars), min_size=0, max_size=DATASET_NAME_LIMIT - 2))
|
||||
|
||||
name = (first_char + remaining)[:128]
|
||||
return name.encode("utf-8").decode("utf-8")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("clear_datasets")
|
||||
class TestAuthorization:
|
||||
@pytest.mark.parametrize(
|
||||
@ -33,6 +46,7 @@ class TestAuthorization:
|
||||
"Authentication error: API key is invalid!",
|
||||
),
|
||||
],
|
||||
ids=["empty_auth", "invalid_api_token"],
|
||||
)
|
||||
def test_invalid_auth(self, auth, expected_code, expected_message):
|
||||
res = create_dataset(auth, {"name": "auth_test"})
|
||||
@ -42,31 +56,509 @@ class TestAuthorization:
|
||||
|
||||
@pytest.mark.usefixtures("clear_datasets")
|
||||
class TestDatasetCreation:
|
||||
@given(name=valid_names())
|
||||
@example("a" * 128)
|
||||
@settings(max_examples=20)
|
||||
def test_valid_name(self, get_http_api_auth, name):
|
||||
res = create_dataset(get_http_api_auth, {"name": name})
|
||||
assert res["code"] == 0, res
|
||||
assert res["data"]["name"] == name, res
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"payload, expected_code",
|
||||
"name, expected_message",
|
||||
[
|
||||
({"name": "valid_name"}, 0),
|
||||
({"name": "a" * (DATASET_NAME_LIMIT + 1)}, 102),
|
||||
({"name": 0}, 100),
|
||||
({"name": ""}, 102),
|
||||
({"name": "duplicated_name"}, 102),
|
||||
({"name": "case_insensitive"}, 102),
|
||||
("", "String should have at least 1 character"),
|
||||
(" ", "String should have at least 1 character"),
|
||||
("a" * (DATASET_NAME_LIMIT + 1), "String should have at most 128 characters"),
|
||||
(0, "Input should be a valid string"),
|
||||
],
|
||||
ids=["empty_name", "space_name", "too_long_name", "invalid_name"],
|
||||
)
|
||||
def test_basic_scenarios(self, get_http_api_auth, payload, expected_code):
|
||||
if payload["name"] == "duplicated_name":
|
||||
create_dataset(get_http_api_auth, payload)
|
||||
elif payload["name"] == "case_insensitive":
|
||||
create_dataset(get_http_api_auth, {"name": payload["name"].upper()})
|
||||
def test_invalid_name(self, get_http_api_auth, name, expected_message):
|
||||
res = create_dataset(get_http_api_auth, {"name": name})
|
||||
assert res["code"] == 101, res
|
||||
assert expected_message in res["message"], res
|
||||
|
||||
def test_duplicated_name(self, get_http_api_auth):
|
||||
name = "duplicated_name"
|
||||
payload = {"name": name}
|
||||
res = create_dataset(get_http_api_auth, payload)
|
||||
assert res["code"] == 0, res
|
||||
|
||||
res = create_dataset(get_http_api_auth, payload)
|
||||
assert res["code"] == 101, res
|
||||
assert res["message"] == f"Dataset name '{name}' already exists", res
|
||||
|
||||
assert res["code"] == expected_code
|
||||
if expected_code == 0:
|
||||
assert res["data"]["name"] == payload["name"]
|
||||
def test_case_insensitive(self, get_http_api_auth):
|
||||
name = "CaseInsensitive"
|
||||
res = create_dataset(get_http_api_auth, {"name": name.upper()})
|
||||
assert res["code"] == 0, res
|
||||
|
||||
if payload["name"] in ["duplicated_name", "case_insensitive"]:
|
||||
assert res["message"] == "Duplicated dataset name in creating dataset."
|
||||
res = create_dataset(get_http_api_auth, {"name": name.lower()})
|
||||
assert res["code"] == 101, res
|
||||
assert res["message"] == f"Dataset name '{name.lower()}' already exists", res
|
||||
|
||||
    def test_avatar(self, get_http_api_auth, tmp_path):
        fn = create_image_file(tmp_path / "ragflow_test.png")
        payload = {
            "name": "avatar_test",
            "avatar": f"data:image/png;base64,{encode_avatar(fn)}",
        }
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, res

    def test_avatar_none(self, get_http_api_auth, tmp_path):
        payload = {"name": "test_avatar_none", "avatar": None}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, res
        assert res["data"]["avatar"] is None, res

    def test_avatar_exceeds_limit_length(self, get_http_api_auth):
        res = create_dataset(get_http_api_auth, {"name": "exceeds_limit_length_avatar", "avatar": "a" * 65536})
        assert res["code"] == 101, res
        assert "String should have at most 65535 characters" in res["message"], res

    @pytest.mark.parametrize(
        "name, avatar_prefix, expected_message",
        [
            ("empty_prefix", "", "Missing MIME prefix. Expected format: data:<mime>;base64,<data>"),
            ("missing_comma", "data:image/png;base64", "Missing MIME prefix. Expected format: data:<mime>;base64,<data>"),
            ("invalid_prefix", "invalid_prefix:image/png;base64,", "Invalid MIME prefix format. Must start with 'data:'"),
            ("unsupported_mime_type", "data:unsupported_mime_type;base64,", "Unsupported MIME type. Allowed: ['image/jpeg', 'image/png']"),
        ],
        ids=["empty_prefix", "missing_comma", "invalid_prefix", "unsupported_mime_type"],
    )
    def test_invalid_avatar_prefix(self, get_http_api_auth, tmp_path, name, avatar_prefix, expected_message):
        fn = create_image_file(tmp_path / "ragflow_test.png")
        payload = {
            "name": name,
            "avatar": f"{avatar_prefix}{encode_avatar(fn)}",
        }
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 101, res
        assert expected_message in res["message"], res

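The expected messages in the test above imply a custom validator that parses the `data:<mime>;base64,<data>` prefix before accepting an avatar. A hedged sketch of such a check, assuming Pydantic v2's `field_validator` (the class and method names here are illustrative, not the repository's actual code):

from pydantic import BaseModel, field_validator

class DatasetPayload(BaseModel):
    avatar: str | None = None

    @field_validator("avatar")
    @classmethod
    def validate_avatar_prefix(cls, v):
        # Hypothetical re-creation of the checks the error messages suggest.
        if v is None:
            return v
        if "," not in v:
            raise ValueError("Missing MIME prefix. Expected format: data:<mime>;base64,<data>")
        prefix = v.split(",", 1)[0]
        if not prefix.startswith("data:"):
            raise ValueError("Invalid MIME prefix format. Must start with 'data:'")
        mime = prefix[len("data:"):].split(";", 1)[0]
        if mime not in ["image/jpeg", "image/png"]:
            raise ValueError("Unsupported MIME type. Allowed: ['image/jpeg', 'image/png']")
        return v
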
    def test_description_none(self, get_http_api_auth):
        payload = {"name": "test_description_none", "description": None}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, res
        assert res["data"]["description"] is None, res

    def test_description_exceeds_limit_length(self, get_http_api_auth):
        payload = {"name": "exceeds_limit_length_description", "description": "a" * 65536}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 101, res
        assert "String should have at most 65535 characters" in res["message"], res

    @pytest.mark.parametrize(
        "name, embedding_model",
        [
            ("BAAI/bge-large-zh-v1.5@BAAI", "BAAI/bge-large-zh-v1.5@BAAI"),
            ("maidalun1020/bce-embedding-base_v1@Youdao", "maidalun1020/bce-embedding-base_v1@Youdao"),
            ("embedding-3@ZHIPU-AI", "embedding-3@ZHIPU-AI"),
            ("embedding_model_default", None),
        ],
        ids=["builtin_baai", "builtin_youdao", "tenant_zhipu", "default"],
    )
    def test_valid_embedding_model(self, get_http_api_auth, name, embedding_model):
        if embedding_model is None:
            payload = {"name": name}
        else:
            payload = {"name": name, "embedding_model": embedding_model}

        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, res
        if embedding_model is None:
            assert res["data"]["embedding_model"] == "BAAI/bge-large-zh-v1.5@BAAI", res
        else:
            assert res["data"]["embedding_model"] == embedding_model, res

    @pytest.mark.parametrize(
        "name, embedding_model",
        [
            ("unknown_llm_name", "unknown@ZHIPU-AI"),
            ("unknown_llm_factory", "embedding-3@unknown"),
            ("tenant_no_auth", "deepseek-chat@DeepSeek"),
        ],
        ids=["unknown_llm_name", "unknown_llm_factory", "tenant_no_auth"],
    )
    def test_invalid_embedding_model(self, get_http_api_auth, name, embedding_model):
        payload = {"name": name, "embedding_model": embedding_model}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 101, res
        assert res["message"] == f"The embedding_model '{embedding_model}' is not supported", res

    @pytest.mark.parametrize(
        "name, embedding_model",
        [
            ("builtin_missing_at", "BAAI/bge-large-zh-v1.5"),
            ("tenant_missing_at", "embedding-3ZHIPU-AI"),
        ],
        ids=["builtin_missing_at", "tenant_missing_at"],
    )
    def test_embedding_model_missing_at(self, get_http_api_auth, name, embedding_model):
        payload = {"name": name, "embedding_model": embedding_model}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 101, res
        assert "Embedding model must be xxx@yyy" in res["message"], res

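The `xxx@yyy` requirement asserted above means the embedding model identifier must carry both a model name and a factory, separated by `@`. A minimal illustrative helper (hypothetical, not the server's actual code):

def split_embedding_model(embedding_model: str) -> tuple[str, str]:
    # e.g. "BAAI/bge-large-zh-v1.5@BAAI" -> ("BAAI/bge-large-zh-v1.5", "BAAI")
    if "@" not in embedding_model:
        raise ValueError("Embedding model must be xxx@yyy")
    name, factory = embedding_model.rsplit("@", 1)
    return name, factory
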
    @pytest.mark.parametrize(
        "name, permission",
        [
            ("me", "me"),
            ("team", "team"),
            ("me_uppercase", "ME"),
            ("team_uppercase", "TEAM"),
            ("permission_default", None),
        ],
        ids=["me", "team", "me_uppercase", "team_uppercase", "permission_default"],
    )
    def test_valid_permission(self, get_http_api_auth, name, permission):
        if permission is None:
            payload = {"name": name}
        else:
            payload = {"name": name, "permission": permission}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, res
        if permission is None:
            assert res["data"]["permission"] == "me", res
        else:
            assert res["data"]["permission"] == permission.lower(), res

    @pytest.mark.parametrize(
        "name, permission",
        [
            ("empty", ""),
            ("unknown", "unknown"),
            ("type_error", list()),
        ],
    )
    def test_invalid_permission(self, get_http_api_auth, name, permission):
        payload = {"name": name, "permission": permission}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 101
        assert "Input should be 'me' or 'team'" in res["message"]

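Taken together, the two permission tests describe a field that lowercases its input and then accepts only "me" or "team". In Pydantic v2 terms this maps naturally onto a `Literal` with a pre-validator; a hypothetical sketch (not the repository's actual model):

from typing import Literal
from pydantic import BaseModel, field_validator

class DatasetPayload(BaseModel):
    permission: Literal["me", "team"] = "me"

    @field_validator("permission", mode="before")
    @classmethod
    def lowercase_permission(cls, v):
        # "ME" / "TEAM" are normalized before validation; anything else fails
        # with Pydantic's "Input should be 'me' or 'team'" message.
        return v.lower() if isinstance(v, str) else v
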
    @pytest.mark.parametrize(
        "name, chunk_method",
        [
            ("naive", "naive"),
            ("book", "book"),
            ("email", "email"),
            ("laws", "laws"),
            ("manual", "manual"),
            ("one", "one"),
            ("paper", "paper"),
            ("picture", "picture"),
            ("presentation", "presentation"),
            ("qa", "qa"),
            ("table", "table"),
            ("tag", "tag"),
            ("chunk_method_default", None),
        ],
    )
    def test_valid_chunk_method(self, get_http_api_auth, name, chunk_method):
        if chunk_method is None:
            payload = {"name": name}
        else:
            payload = {"name": name, "chunk_method": chunk_method}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, res
        if chunk_method is None:
            assert res["data"]["chunk_method"] == "naive", res
        else:
            assert res["data"]["chunk_method"] == chunk_method, res

    @pytest.mark.parametrize(
        "name, chunk_method",
        [
            ("empty", ""),
            ("unknown", "unknown"),
            ("type_error", list()),
        ],
    )
    def test_invalid_chunk_method(self, get_http_api_auth, name, chunk_method):
        payload = {"name": name, "chunk_method": chunk_method}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 101, res
        assert "Input should be 'naive', 'book', 'email', 'laws', 'manual', 'one', 'paper', 'picture', 'presentation', 'qa', 'table' or 'tag'" in res["message"], res

    @pytest.mark.parametrize(
        "name, parser_config",
        [
            ("default_none", None),
            ("default_empty", {}),
            ("auto_keywords_min", {"auto_keywords": 0}),
            ("auto_keywords_mid", {"auto_keywords": 16}),
            ("auto_keywords_max", {"auto_keywords": 32}),
            ("auto_questions_min", {"auto_questions": 0}),
            ("auto_questions_mid", {"auto_questions": 5}),
            ("auto_questions_max", {"auto_questions": 10}),
            ("chunk_token_num_min", {"chunk_token_num": 1}),
            ("chunk_token_num_mid", {"chunk_token_num": 1024}),
            ("chunk_token_num_max", {"chunk_token_num": 2048}),
            ("delimiter", {"delimiter": "\n"}),
            ("delimiter_space", {"delimiter": " "}),
            ("html4excel_true", {"html4excel": True}),
            ("html4excel_false", {"html4excel": False}),
            ("layout_recognize_DeepDOC", {"layout_recognize": "DeepDOC"}),
            ("layout_recognize_naive", {"layout_recognize": "Plain Text"}),
            ("tag_kb_ids", {"tag_kb_ids": ["1", "2"]}),
            ("topn_tags_min", {"topn_tags": 1}),
            ("topn_tags_mid", {"topn_tags": 5}),
            ("topn_tags_max", {"topn_tags": 10}),
            ("filename_embd_weight_min", {"filename_embd_weight": 0.1}),
            ("filename_embd_weight_mid", {"filename_embd_weight": 0.5}),
            ("filename_embd_weight_max", {"filename_embd_weight": 1.0}),
            ("task_page_size_min", {"task_page_size": 1}),
            ("task_page_size_mid", {"task_page_size": 5_000}),
            ("task_page_size_max", {"task_page_size": 10_000}),
            ("pages", {"pages": [[1, 100]]}),
            ("pages_none", None),
            ("graphrag_true", {"graphrag": {"use_graphrag": True}}),
            ("graphrag_false", {"graphrag": {"use_graphrag": False}}),
            ("graphrag_entity_types", {"graphrag": {"entity_types": ["age", "sex", "height", "weight"]}}),
            ("graphrag_method_general", {"graphrag": {"method": "general"}}),
            ("graphrag_method_light", {"graphrag": {"method": "light"}}),
            ("graphrag_community_true", {"graphrag": {"community": True}}),
            ("graphrag_community_false", {"graphrag": {"community": False}}),
            ("graphrag_resolution_true", {"graphrag": {"resolution": True}}),
            ("graphrag_resolution_false", {"graphrag": {"resolution": False}}),
            ("raptor_true", {"raptor": {"use_raptor": True}}),
            ("raptor_false", {"raptor": {"use_raptor": False}}),
            ("raptor_prompt", {"raptor": {"prompt": "Who are you?"}}),
            ("raptor_max_token_min", {"raptor": {"max_token": 1}}),
            ("raptor_max_token_mid", {"raptor": {"max_token": 1024}}),
            ("raptor_max_token_max", {"raptor": {"max_token": 2048}}),
            ("raptor_threshold_min", {"raptor": {"threshold": 0.0}}),
            ("raptor_threshold_mid", {"raptor": {"threshold": 0.5}}),
            ("raptor_threshold_max", {"raptor": {"threshold": 1.0}}),
            ("raptor_max_cluster_min", {"raptor": {"max_cluster": 1}}),
            ("raptor_max_cluster_mid", {"raptor": {"max_cluster": 512}}),
            ("raptor_max_cluster_max", {"raptor": {"max_cluster": 1024}}),
            ("raptor_random_seed_min", {"raptor": {"random_seed": 0}}),
            ("raptor_random_seed_mid", {"raptor": {"random_seed": 5_000}}),
            ("raptor_random_seed_max", {"raptor": {"random_seed": 10_000}}),
        ],
        ids=[
            "default_none",
            "default_empty",
            "auto_keywords_min",
            "auto_keywords_mid",
            "auto_keywords_max",
            "auto_questions_min",
            "auto_questions_mid",
            "auto_questions_max",
            "chunk_token_num_min",
            "chunk_token_num_mid",
            "chunk_token_num_max",
            "delimiter",
            "delimiter_space",
            "html4excel_true",
            "html4excel_false",
            "layout_recognize_DeepDOC",
            "layout_recognize_naive",
            "tag_kb_ids",
            "topn_tags_min",
            "topn_tags_mid",
            "topn_tags_max",
            "filename_embd_weight_min",
            "filename_embd_weight_mid",
            "filename_embd_weight_max",
            "task_page_size_min",
            "task_page_size_mid",
            "task_page_size_max",
            "pages",
            "pages_none",
            "graphrag_true",
            "graphrag_false",
            "graphrag_entity_types",
            "graphrag_method_general",
            "graphrag_method_light",
            "graphrag_community_true",
            "graphrag_community_false",
            "graphrag_resolution_true",
            "graphrag_resolution_false",
            "raptor_true",
            "raptor_false",
            "raptor_prompt",
            "raptor_max_token_min",
            "raptor_max_token_mid",
            "raptor_max_token_max",
            "raptor_threshold_min",
            "raptor_threshold_mid",
            "raptor_threshold_max",
            "raptor_max_cluster_min",
            "raptor_max_cluster_mid",
            "raptor_max_cluster_max",
            "raptor_random_seed_min",
            "raptor_random_seed_mid",
            "raptor_random_seed_max",
        ],
    )
    def test_valid_parser_config(self, get_http_api_auth, name, parser_config):
        if parser_config is None:
            payload = {"name": name}
        else:
            payload = {"name": name, "parser_config": parser_config}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, res
        if parser_config is None:
            assert res["data"]["parser_config"] == {
                "chunk_token_num": 128,
                "delimiter": r"\n!?;。;!?",
                "html4excel": False,
                "layout_recognize": "DeepDOC",
                "raptor": {"use_raptor": False},
            }
        elif parser_config == {}:
            assert res["data"]["parser_config"] == {
                "auto_keywords": 0,
                "auto_questions": 0,
                "chunk_token_num": 128,
                "delimiter": r"\n!?;。;!?",
                "filename_embd_weight": None,
                "graphrag": None,
                "html4excel": False,
                "layout_recognize": "DeepDOC",
                "pages": None,
                "raptor": None,
                "tag_kb_ids": [],
                "task_page_size": None,
                "topn_tags": 1,
            }
        else:
            for k, v in parser_config.items():
                if isinstance(v, dict):
                    for kk, vv in v.items():
                        assert res["data"]["parser_config"][k][kk] == vv
                else:
                    assert res["data"]["parser_config"][k] == v

    @pytest.mark.parametrize(
        "name, parser_config, expected_message",
        [
            ("auto_keywords_min_limit", {"auto_keywords": -1}, "Input should be greater than or equal to 0"),
            ("auto_keywords_max_limit", {"auto_keywords": 33}, "Input should be less than or equal to 32"),
            ("auto_keywords_float_not_allowed", {"auto_keywords": 3.14}, "Input should be a valid integer, got a number with a fractional part"),
            ("auto_keywords_type_invalid", {"auto_keywords": "string"}, "Input should be a valid integer, unable to parse string as an integer"),
            ("auto_questions_min_limit", {"auto_questions": -1}, "Input should be greater than or equal to 0"),
            ("auto_questions_max_limit", {"auto_questions": 11}, "Input should be less than or equal to 10"),
            ("auto_questions_float_not_allowed", {"auto_questions": 3.14}, "Input should be a valid integer, got a number with a fractional part"),
            ("auto_questions_type_invalid", {"auto_questions": "string"}, "Input should be a valid integer, unable to parse string as an integer"),
            ("chunk_token_num_min_limit", {"chunk_token_num": 0}, "Input should be greater than or equal to 1"),
            ("chunk_token_num_max_limit", {"chunk_token_num": 2049}, "Input should be less than or equal to 2048"),
            ("chunk_token_num_float_not_allowed", {"chunk_token_num": 3.14}, "Input should be a valid integer, got a number with a fractional part"),
            ("chunk_token_num_type_invalid", {"chunk_token_num": "string"}, "Input should be a valid integer, unable to parse string as an integer"),
            ("delimiter_empty", {"delimiter": ""}, "String should have at least 1 character"),
            ("html4excel_type_invalid", {"html4excel": "string"}, "Input should be a valid boolean, unable to interpret input"),
            ("tag_kb_ids_not_list", {"tag_kb_ids": "1,2"}, "Input should be a valid list"),
            ("tag_kb_ids_int_in_list", {"tag_kb_ids": [1, 2]}, "Input should be a valid string"),
            ("topn_tags_min_limit", {"topn_tags": 0}, "Input should be greater than or equal to 1"),
            ("topn_tags_max_limit", {"topn_tags": 11}, "Input should be less than or equal to 10"),
            ("topn_tags_float_not_allowed", {"topn_tags": 3.14}, "Input should be a valid integer, got a number with a fractional part"),
            ("topn_tags_type_invalid", {"topn_tags": "string"}, "Input should be a valid integer, unable to parse string as an integer"),
            ("filename_embd_weight_min_limit", {"filename_embd_weight": -1}, "Input should be greater than or equal to 0"),
            ("filename_embd_weight_max_limit", {"filename_embd_weight": 1.1}, "Input should be less than or equal to 1"),
            ("filename_embd_weight_type_invalid", {"filename_embd_weight": "string"}, "Input should be a valid number, unable to parse string as a number"),
            ("task_page_size_min_limit", {"task_page_size": 0}, "Input should be greater than or equal to 1"),
            ("task_page_size_max_limit", {"task_page_size": 10_001}, "Input should be less than or equal to 10000"),
            ("task_page_size_float_not_allowed", {"task_page_size": 3.14}, "Input should be a valid integer, got a number with a fractional part"),
            ("task_page_size_type_invalid", {"task_page_size": "string"}, "Input should be a valid integer, unable to parse string as an integer"),
            ("pages_not_list", {"pages": "1,2"}, "Input should be a valid list"),
            ("pages_not_list_in_list", {"pages": ["1,2"]}, "Input should be a valid list"),
            ("pages_not_int_list", {"pages": [["string1", "string2"]]}, "Input should be a valid integer, unable to parse string as an integer"),
            ("graphrag_type_invalid", {"graphrag": {"use_graphrag": "string"}}, "Input should be a valid boolean, unable to interpret input"),
            ("graphrag_entity_types_not_list", {"graphrag": {"entity_types": "1,2"}}, "Input should be a valid list"),
            ("graphrag_entity_types_not_str_in_list", {"graphrag": {"entity_types": [1, 2]}}, "Input should be a valid string"),
            ("graphrag_method_unknown", {"graphrag": {"method": "unknown"}}, "Input should be 'light' or 'general'"),
            ("graphrag_method_none", {"graphrag": {"method": None}}, "Input should be 'light' or 'general'"),
            ("graphrag_community_type_invalid", {"graphrag": {"community": "string"}}, "Input should be a valid boolean, unable to interpret input"),
            ("graphrag_resolution_type_invalid", {"graphrag": {"resolution": "string"}}, "Input should be a valid boolean, unable to interpret input"),
            ("raptor_type_invalid", {"raptor": {"use_raptor": "string"}}, "Input should be a valid boolean, unable to interpret input"),
            ("raptor_prompt_empty", {"raptor": {"prompt": ""}}, "String should have at least 1 character"),
            ("raptor_prompt_space", {"raptor": {"prompt": " "}}, "String should have at least 1 character"),
            ("raptor_max_token_min_limit", {"raptor": {"max_token": 0}}, "Input should be greater than or equal to 1"),
            ("raptor_max_token_max_limit", {"raptor": {"max_token": 2049}}, "Input should be less than or equal to 2048"),
            ("raptor_max_token_float_not_allowed", {"raptor": {"max_token": 3.14}}, "Input should be a valid integer, got a number with a fractional part"),
            ("raptor_max_token_type_invalid", {"raptor": {"max_token": "string"}}, "Input should be a valid integer, unable to parse string as an integer"),
            ("raptor_threshold_min_limit", {"raptor": {"threshold": -0.1}}, "Input should be greater than or equal to 0"),
            ("raptor_threshold_max_limit", {"raptor": {"threshold": 1.1}}, "Input should be less than or equal to 1"),
            ("raptor_threshold_type_invalid", {"raptor": {"threshold": "string"}}, "Input should be a valid number, unable to parse string as a number"),
            ("raptor_max_cluster_min_limit", {"raptor": {"max_cluster": 0}}, "Input should be greater than or equal to 1"),
            ("raptor_max_cluster_max_limit", {"raptor": {"max_cluster": 1025}}, "Input should be less than or equal to 1024"),
            ("raptor_max_cluster_float_not_allowed", {"raptor": {"max_cluster": 3.14}}, "Input should be a valid integer, got a number with a fractional part"),
            ("raptor_max_cluster_type_invalid", {"raptor": {"max_cluster": "string"}}, "Input should be a valid integer, unable to parse string as an integer"),
            ("raptor_random_seed_min_limit", {"raptor": {"random_seed": -1}}, "Input should be greater than or equal to 0"),
            ("raptor_random_seed_max_limit", {"raptor": {"random_seed": 10_001}}, "Input should be less than or equal to 10000"),
            ("raptor_random_seed_float_not_allowed", {"raptor": {"random_seed": 3.14}}, "Input should be a valid integer, got a number with a fractional part"),
            ("raptor_random_seed_type_invalid", {"raptor": {"random_seed": "string"}}, "Input should be a valid integer, unable to parse string as an integer"),
            ("parser_config_type_invalid", {"delimiter": "a" * 65536}, "Parser config have at most 65535 characters"),
        ],
        ids=[
            "auto_keywords_min_limit",
            "auto_keywords_max_limit",
            "auto_keywords_float_not_allowed",
            "auto_keywords_type_invalid",
            "auto_questions_min_limit",
            "auto_questions_max_limit",
            "auto_questions_float_not_allowed",
            "auto_questions_type_invalid",
            "chunk_token_num_min_limit",
            "chunk_token_num_max_limit",
            "chunk_token_num_float_not_allowed",
            "chunk_token_num_type_invalid",
            "delimiter_empty",
            "html4excel_type_invalid",
            "tag_kb_ids_not_list",
            "tag_kb_ids_int_in_list",
            "topn_tags_min_limit",
            "topn_tags_max_limit",
            "topn_tags_float_not_allowed",
            "topn_tags_type_invalid",
            "filename_embd_weight_min_limit",
            "filename_embd_weight_max_limit",
            "filename_embd_weight_type_invalid",
            "task_page_size_min_limit",
            "task_page_size_max_limit",
            "task_page_size_float_not_allowed",
            "task_page_size_type_invalid",
            "pages_not_list",
            "pages_not_list_in_list",
            "pages_not_int_list",
            "graphrag_type_invalid",
            "graphrag_entity_types_not_list",
            "graphrag_entity_types_not_str_in_list",
            "graphrag_method_unknown",
            "graphrag_method_none",
            "graphrag_community_type_invalid",
            "graphrag_resolution_type_invalid",
            "raptor_type_invalid",
            "raptor_prompt_empty",
            "raptor_prompt_space",
            "raptor_max_token_min_limit",
            "raptor_max_token_max_limit",
            "raptor_max_token_float_not_allowed",
            "raptor_max_token_type_invalid",
            "raptor_threshold_min_limit",
            "raptor_threshold_max_limit",
            "raptor_threshold_type_invalid",
            "raptor_max_cluster_min_limit",
            "raptor_max_cluster_max_limit",
            "raptor_max_cluster_float_not_allowed",
            "raptor_max_cluster_type_invalid",
            "raptor_random_seed_min_limit",
            "raptor_random_seed_max_limit",
            "raptor_random_seed_float_not_allowed",
            "raptor_random_seed_type_invalid",
            "parser_config_type_invalid",
        ],
    )
    def test_invalid_parser_config(self, get_http_api_auth, name, parser_config, expected_message):
        payload = {"name": name, "parser_config": parser_config}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 101, res
        assert expected_message in res["message"], res

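The bounds exercised above (auto_keywords 0-32, auto_questions 0-10, chunk_token_num 1-2048, topn_tags 1-10, plus the nested raptor and graphrag ranges) correspond to constrained fields on a nested request model. A partial, hypothetical sketch using Pydantic v2 `Field` constraints (the defaults here are assumptions, not the repository's values):

from pydantic import BaseModel, Field

class RaptorConfig(BaseModel):
    use_raptor: bool = False
    max_token: int = Field(default=256, ge=1, le=2048)
    threshold: float = Field(default=0.1, ge=0.0, le=1.0)
    max_cluster: int = Field(default=64, ge=1, le=1024)
    random_seed: int = Field(default=0, ge=0, le=10_000)

class ParserConfig(BaseModel):
    auto_keywords: int = Field(default=0, ge=0, le=32)
    auto_questions: int = Field(default=0, ge=0, le=10)
    chunk_token_num: int = Field(default=128, ge=1, le=2048)
    topn_tags: int = Field(default=1, ge=1, le=10)
    raptor: RaptorConfig | None = None
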
    @pytest.mark.slow
    def test_dataset_10k(self, get_http_api_auth):
@ -74,329 +566,3 @@ class TestDatasetCreation:
        payload = {"name": f"dataset_{i}"}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0, f"Failed to create dataset {i}"

    def test_avatar(self, get_http_api_auth, tmp_path):
        fn = create_image_file(tmp_path / "ragflow_test.png")
        payload = {
            "name": "avatar_test",
            "avatar": encode_avatar(fn),
        }
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0

    def test_description(self, get_http_api_auth):
        payload = {"name": "description_test", "description": "a" * 65536}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == 0

    @pytest.mark.parametrize(
        "name, permission, expected_code",
        [
            ("me", "me", 0),
            ("team", "team", 0),
            ("empty_permission", "", 0),
            ("me_upercase", "ME", 102),
            ("team_upercase", "TEAM", 102),
            ("other_permission", "other_permission", 102),
        ],
    )
    def test_permission(self, get_http_api_auth, name, permission, expected_code):
        payload = {"name": name, "permission": permission}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == expected_code
        if expected_code == 0 and permission != "":
            assert res["data"]["permission"] == permission
        if permission == "":
            assert res["data"]["permission"] == "me"

    @pytest.mark.parametrize(
        "name, chunk_method, expected_code",
        [
            ("naive", "naive", 0),
            ("manual", "manual", 0),
            ("qa", "qa", 0),
            ("table", "table", 0),
            ("paper", "paper", 0),
            ("book", "book", 0),
            ("laws", "laws", 0),
            ("presentation", "presentation", 0),
            ("picture", "picture", 0),
            ("one", "one", 0),
            ("email", "email", 0),
            ("tag", "tag", 0),
            ("empty_chunk_method", "", 0),
            ("other_chunk_method", "other_chunk_method", 102),
        ],
    )
    def test_chunk_method(self, get_http_api_auth, name, chunk_method, expected_code):
        payload = {"name": name, "chunk_method": chunk_method}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == expected_code
        if expected_code == 0 and chunk_method != "":
            assert res["data"]["chunk_method"] == chunk_method
        if chunk_method == "":
            assert res["data"]["chunk_method"] == "naive"

    @pytest.mark.parametrize(
        "name, embedding_model, expected_code",
        [
            ("BAAI/bge-large-zh-v1.5", "BAAI/bge-large-zh-v1.5", 0),
            (
                "maidalun1020/bce-embedding-base_v1",
                "maidalun1020/bce-embedding-base_v1",
                0,
            ),
            ("other_embedding_model", "other_embedding_model", 102),
        ],
    )
    def test_embedding_model(self, get_http_api_auth, name, embedding_model, expected_code):
        payload = {"name": name, "embedding_model": embedding_model}
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == expected_code
        if expected_code == 0:
            assert res["data"]["embedding_model"] == embedding_model

    @pytest.mark.parametrize(
        "name, chunk_method, parser_config, expected_code, expected_message",
        [
            (
                "naive_default",
                "naive",
                {
                    "chunk_token_num": 128,
                    "layout_recognize": "DeepDOC",
                    "html4excel": False,
                    "delimiter": "\n!?。;!?",
                    "task_page_size": 12,
                    "raptor": {"use_raptor": False},
                },
                0,
                "",
            ),
            ("naive_empty", "naive", {}, 0, ""),
            (
                "naive_chunk_token_num_negative",
                "naive",
                {"chunk_token_num": -1},
                100,
                "AssertionError('chunk_token_num should be in range from 1 to 100000000')",
            ),
            (
                "naive_chunk_token_num_zero",
                "naive",
                {"chunk_token_num": 0},
                100,
                "AssertionError('chunk_token_num should be in range from 1 to 100000000')",
            ),
            (
                "naive_chunk_token_num_max",
                "naive",
                {"chunk_token_num": 100000000},
                100,
                "AssertionError('chunk_token_num should be in range from 1 to 100000000')",
            ),
            (
                "naive_chunk_token_num_float",
                "naive",
                {"chunk_token_num": 3.14},
                100,
                "AssertionError('chunk_token_num should be int')",
            ),
            (
                "naive_chunk_token_num_str",
                "naive",
                {"chunk_token_num": "1024"},
                100,
                "AssertionError('chunk_token_num should be int')",
            ),
            (
                "naive_layout_recognize_DeepDOC",
                "naive",
                {"layout_recognize": "DeepDOC"},
                0,
                "",
            ),
            (
                "naive_layout_recognize_Naive",
                "naive",
                {"layout_recognize": "Naive"},
                0,
                "",
            ),
            ("naive_html4excel_true", "naive", {"html4excel": True}, 0, ""),
            ("naive_html4excel_false", "naive", {"html4excel": False}, 0, ""),
            (
                "naive_html4excel_not_bool",
                "naive",
                {"html4excel": 1},
                100,
                "AssertionError('html4excel should be True or False')",
            ),
            ("naive_delimiter_empty", "naive", {"delimiter": ""}, 0, ""),
            ("naive_delimiter_backticks", "naive", {"delimiter": "`##`"}, 0, ""),
            (
                "naive_delimiter_not_str",
                "naive",
                {"delimiter": 1},
                100,
                "AssertionError('delimiter should be str')",
            ),
            (
                "naive_task_page_size_negative",
                "naive",
                {"task_page_size": -1},
                100,
                "AssertionError('task_page_size should be in range from 1 to 100000000')",
            ),
            (
                "naive_task_page_size_zero",
                "naive",
                {"task_page_size": 0},
                100,
                "AssertionError('task_page_size should be in range from 1 to 100000000')",
            ),
            (
                "naive_task_page_size_max",
                "naive",
                {"task_page_size": 100000000},
                100,
                "AssertionError('task_page_size should be in range from 1 to 100000000')",
            ),
            (
                "naive_task_page_size_float",
                "naive",
                {"task_page_size": 3.14},
                100,
                "AssertionError('task_page_size should be int')",
            ),
            (
                "naive_task_page_size_str",
                "naive",
                {"task_page_size": "1024"},
                100,
                "AssertionError('task_page_size should be int')",
            ),
            ("naive_raptor_true", "naive", {"raptor": {"use_raptor": True}}, 0, ""),
            ("naive_raptor_false", "naive", {"raptor": {"use_raptor": False}}, 0, ""),
            (
                "invalid_key",
                "naive",
                {"invalid_key": "invalid_value"},
                100,
                """AssertionError("Abnormal 'parser_config'. Invalid key: invalid_key")""",
            ),
            (
                "naive_auto_keywords_negative",
                "naive",
                {"auto_keywords": -1},
                100,
                "AssertionError('auto_keywords should be in range from 0 to 32')",
            ),
            (
                "naive_auto_keywords_max",
                "naive",
                {"auto_keywords": 32},
                100,
                "AssertionError('auto_keywords should be in range from 0 to 32')",
            ),
            (
                "naive_auto_keywords_float",
                "naive",
                {"auto_keywords": 3.14},
                100,
                "AssertionError('auto_keywords should be int')",
            ),
            (
                "naive_auto_keywords_str",
                "naive",
                {"auto_keywords": "1024"},
                100,
                "AssertionError('auto_keywords should be int')",
            ),
            (
                "naive_auto_questions_negative",
                "naive",
                {"auto_questions": -1},
                100,
                "AssertionError('auto_questions should be in range from 0 to 10')",
            ),
            (
                "naive_auto_questions_max",
                "naive",
                {"auto_questions": 10},
                100,
                "AssertionError('auto_questions should be in range from 0 to 10')",
            ),
            (
                "naive_auto_questions_float",
                "naive",
                {"auto_questions": 3.14},
                100,
                "AssertionError('auto_questions should be int')",
            ),
            (
                "naive_auto_questions_str",
                "naive",
                {"auto_questions": "1024"},
                100,
                "AssertionError('auto_questions should be int')",
            ),
            (
                "naive_topn_tags_negative",
                "naive",
                {"topn_tags": -1},
                100,
                "AssertionError('topn_tags should be in range from 0 to 10')",
            ),
            (
                "naive_topn_tags_max",
                "naive",
                {"topn_tags": 10},
                100,
                "AssertionError('topn_tags should be in range from 0 to 10')",
            ),
            (
                "naive_topn_tags_float",
                "naive",
                {"topn_tags": 3.14},
                100,
                "AssertionError('topn_tags should be int')",
            ),
            (
                "naive_topn_tags_str",
                "naive",
                {"topn_tags": "1024"},
                100,
                "AssertionError('topn_tags should be int')",
            ),
        ],
    )
    def test_parser_configs(
        self,
        get_http_api_auth,
        name,
        chunk_method,
        parser_config,
        expected_code,
        expected_message,
    ):
        payload = {
            "name": name,
            "chunk_method": chunk_method,
            "parser_config": parser_config,
        }
        res = create_dataset(get_http_api_auth, payload)
        assert res["code"] == expected_code
        if expected_code == 0 and parser_config != {}:
            for k, v in parser_config.items():
                assert res["data"]["parser_config"][k] == v
        if expected_code != 0 or expected_message:
            assert res["message"] == expected_message
        if parser_config == {}:
            assert res["data"]["parser_config"] == {
                "chunk_token_num": 128,
                "delimiter": "\\n!?;。;!?",
                "html4excel": False,
                "layout_recognize": "DeepDOC",
                "raptor": {"use_raptor": False},
            }

@ -14,10 +14,11 @@
# limitations under the License.
#

from ragflow_sdk import RAGFlow
import random

import pytest
from common import HOST_ADDRESS
from ragflow_sdk import RAGFlow


def test_create_dataset_with_name(get_api_key_fixture):
@ -32,7 +33,7 @@ def test_create_dataset_with_duplicated_name(get_api_key_fixture):
    rag.create_dataset("test_create_dataset_with_duplicated_name")
    with pytest.raises(Exception) as exc_info:
        rag.create_dataset("test_create_dataset_with_duplicated_name")
    assert str(exc_info.value) == "Duplicated dataset name in creating dataset."
    assert str(exc_info.value) == "Dataset name 'test_create_dataset_with_duplicated_name' already exists"


def test_create_dataset_with_random_chunk_method(get_api_key_fixture):
@ -46,11 +47,13 @@ def test_create_dataset_with_random_chunk_method(get_api_key_fixture):
def test_create_dataset_with_invalid_parameter(get_api_key_fixture):
    API_KEY = get_api_key_fixture
    rag = RAGFlow(API_KEY, HOST_ADDRESS)
    valid_chunk_methods = ["naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one", "email", "tag"]
    chunk_method = "invalid_chunk_method"
    with pytest.raises(Exception) as exc_info:
        rag.create_dataset("test_create_dataset_with_invalid_chunk_method", chunk_method=chunk_method)
    assert str(exc_info.value) == f"'{chunk_method}' is not in {valid_chunk_methods}"
    assert (
        str(exc_info.value)
        == f"Field: <chunk_method> - Message: <Input should be 'naive', 'book', 'email', 'laws', 'manual', 'one', 'paper', 'picture', 'presentation', 'qa', 'table' or 'tag'> - Value: <{chunk_method}>"
    )


def test_update_dataset_with_name(get_api_key_fixture):
306
sdk/python/uv.lock
generated
@ -1,6 +1,16 @@
version = 1
revision = 1
requires-python = ">=3.10, <3.13"

[[package]]
name = "attrs"
version = "25.3.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3" },
]

[[package]]
name = "beartype"
version = "0.18.5"
@ -12,11 +22,11 @@ wheels = [

[[package]]
name = "certifi"
version = "2025.1.31"
version = "2025.4.26"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe" },
    { url = "https://mirrors.aliyun.com/pypi/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3" },
]

[[package]]
@ -103,6 +113,20 @@ wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b" },
]

[[package]]
name = "hypothesis"
version = "6.131.9"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
dependencies = [
    { name = "attrs" },
    { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
    { name = "sortedcontainers" },
]
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/10/ff/217417d065aa8a4e6815ddc39acee1222f1b67bd0e4803b85de86a837873/hypothesis-6.131.9.tar.gz", hash = "sha256:ee9b0e1403e1121c91921dbdc79d7f509fdb96d457a0389222d2a68d6c8a8f8e" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/bd/e5/41a6733bfe11997795669dec3b3d785c28918e06568a2540dcc29f0d3fa7/hypothesis-6.131.9-py3-none-any.whl", hash = "sha256:7c2d9d6382e98e5337b27bd34e5b223bac23956787a827e1d087e00d893561d6" },
]

[[package]]
name = "idna"
version = "3.10"
@ -114,76 +138,76 @@ wheels = [

[[package]]
name = "iniconfig"
version = "2.0.0"
version = "2.1.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" },
    { url = "https://mirrors.aliyun.com/pypi/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" },
]

[[package]]
name = "lxml"
version = "5.3.1"
version = "5.4.0"
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/ef/f6/c15ca8e5646e937c148e147244817672cf920b56ac0bf2cc1512ae674be8/lxml-5.3.1.tar.gz", hash = "sha256:106b7b5d2977b339f1e97efe2778e2ab20e99994cbb0ec5e55771ed0795920c8" }
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd" }
wheels = [
    { url = "https://mirrors.aliyun.com/pypi/packages/80/4b/73426192004a643c11a644ed2346dbe72da164c8e775ea2e70f60e63e516/lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/30/c2/3b28f642b43fdf9580d936e8fdd3ec43c01a97ecfe17fd67f76ce9099752/lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/1f/a5/45279e464174b99d72d25bc018b097f9211c0925a174ca582a415609f036/lxml-5.3.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:528f3a0498a8edc69af0559bdcf8a9f5a8bf7c00051a6ef3141fdcf27017bbf5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f0/e7/10cd8b9e27ffb6b3465b76604725b67b7c70d4e399750ff88de1b38ab9eb/lxml-5.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4743e30d6f5f92b6d2b7c86b3ad250e0bad8dee4b7ad8a0c44bfb276af89a3" },
    { url = "https://mirrors.aliyun.com/pypi/packages/ce/54/2d6f634924920b17122445136345d44c6d69178c9c49e161aa8f206739d6/lxml-5.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b5d7f8acf809465086d498d62a981fa6a56d2718135bb0e4aa48c502055f5c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a2/fe/7f5ae8fd1f357fcb21b0d4e20416fae870d654380b6487adbcaaf0df9b31/lxml-5.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:928e75a7200a4c09e6efc7482a1337919cc61fe1ba289f297827a5b76d8969c2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/af/70/22fecb6f2ca8dc77d14ab6be3cef767ff8340040bc95dca384b5b1cb333a/lxml-5.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a997b784a639e05b9d4053ef3b20c7e447ea80814a762f25b8ed5a89d261eac" },
    { url = "https://mirrors.aliyun.com/pypi/packages/63/91/21619cc14f7fd1de3f1bdf86cc8106edacf4d685b540d658d84247a3a32a/lxml-5.3.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7b82e67c5feb682dbb559c3e6b78355f234943053af61606af126df2183b9ef9" },
    { url = "https://mirrors.aliyun.com/pypi/packages/50/0f/27183248fa3cdd2040047ceccd320ff1ed1344167f38a4ac26aed092268b/lxml-5.3.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:f1de541a9893cf8a1b1db9bf0bf670a2decab42e3e82233d36a74eda7822b4c9" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c6/8d/9b7388d5b23ed2f239a992a478cbd0ce313aaa2d008dd73c4042b190b6a9/lxml-5.3.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:de1fc314c3ad6bc2f6bd5b5a5b9357b8c6896333d27fdbb7049aea8bd5af2d79" },
    { url = "https://mirrors.aliyun.com/pypi/packages/65/8e/590e20833220eac55b6abcde71d3ae629d38ac1c3543bcc2bfe1f3c2f5d1/lxml-5.3.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7c0536bd9178f754b277a3e53f90f9c9454a3bd108b1531ffff720e082d824f2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/4e/77/cabdf5569fd0415a88ebd1d62d7f2814e71422439b8564aaa03e7eefc069/lxml-5.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68018c4c67d7e89951a91fbd371e2e34cd8cfc71f0bb43b5332db38497025d51" },
    { url = "https://mirrors.aliyun.com/pypi/packages/49/bd/f0b6d50ea7b8b54aaa5df4410cb1d5ae6ffa016b8e0503cae08b86c24674/lxml-5.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa826340a609d0c954ba52fd831f0fba2a4165659ab0ee1a15e4aac21f302406" },
    { url = "https://mirrors.aliyun.com/pypi/packages/fa/69/1793d00a4e3da7f27349edb5a6f3da947ed921263cd9a243fab11c6cbc07/lxml-5.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:796520afa499732191e39fc95b56a3b07f95256f2d22b1c26e217fb69a9db5b5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d3/c9/e2449129b6cb2054c898df8d850ea4dadd75b4c33695a6c4b0f35082f1e7/lxml-5.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3effe081b3135237da6e4c4530ff2a868d3f80be0bda027e118a5971285d42d0" },
    { url = "https://mirrors.aliyun.com/pypi/packages/ed/63/e5da540eba6ab9a0d4188eeaa5c85767b77cafa8efeb70da0593d6cd3b81/lxml-5.3.1-cp310-cp310-win32.whl", hash = "sha256:a22f66270bd6d0804b02cd49dae2b33d4341015545d17f8426f2c4e22f557a23" },
    { url = "https://mirrors.aliyun.com/pypi/packages/08/71/853a3ad812cd24c35b7776977cb0ae40c2b64ff79ad6d6c36c987daffc49/lxml-5.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:0bcfadea3cdc68e678d2b20cb16a16716887dd00a881e16f7d806c2138b8ff0c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/57/bb/2faea15df82114fa27f2a86eec220506c532ee8ce211dff22f48881b353a/lxml-5.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e220f7b3e8656ab063d2eb0cd536fafef396829cafe04cb314e734f87649058f" },
    { url = "https://mirrors.aliyun.com/pypi/packages/9f/d3/374114084abb1f96026eccb6cd48b070f85de82fdabae6c2f1e198fa64e5/lxml-5.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f2cfae0688fd01f7056a17367e3b84f37c545fb447d7282cf2c242b16262607" },
    { url = "https://mirrors.aliyun.com/pypi/packages/0f/fb/44a46efdc235c2dd763c1e929611d8ff3b920c32b8fcd9051d38f4d04633/lxml-5.3.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67d2f8ad9dcc3a9e826bdc7802ed541a44e124c29b7d95a679eeb58c1c14ade8" },
    { url = "https://mirrors.aliyun.com/pypi/packages/3b/e5/168ddf9f16a90b590df509858ae97a8219d6999d5a132ad9f72427454bed/lxml-5.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db0c742aad702fd5d0c6611a73f9602f20aec2007c102630c06d7633d9c8f09a" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f9/0e/3e2742c6f4854b202eb8587c1f7ed760179f6a9fcb34a460497c8c8f3078/lxml-5.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:198bb4b4dd888e8390afa4f170d4fa28467a7eaf857f1952589f16cfbb67af27" },
    { url = "https://mirrors.aliyun.com/pypi/packages/b8/03/b2f2ab9e33c47609c80665e75efed258b030717e06693835413b34e797cb/lxml-5.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2a3e412ce1849be34b45922bfef03df32d1410a06d1cdeb793a343c2f1fd666" },
    { url = "https://mirrors.aliyun.com/pypi/packages/93/ad/0ecfb082b842358c8a9e3115ec944b7240f89821baa8cd7c0cb8a38e05cb/lxml-5.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b8969dbc8d09d9cd2ae06362c3bad27d03f433252601ef658a49bd9f2b22d79" },
    { url = "https://mirrors.aliyun.com/pypi/packages/64/5b/3e93d8ebd2b7eb984c2ad74dfff75493ce96e7b954b12e4f5fc34a700414/lxml-5.3.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5be8f5e4044146a69c96077c7e08f0709c13a314aa5315981185c1f00235fe65" },
    { url = "https://mirrors.aliyun.com/pypi/packages/91/83/7dc412362ee7a0259c7f64349393262525061fad551a1340ef92c59d9732/lxml-5.3.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:133f3493253a00db2c870d3740bc458ebb7d937bd0a6a4f9328373e0db305709" },
    { url = "https://mirrors.aliyun.com/pypi/packages/1e/41/c337f121d9dca148431f246825e021fa1a3f66a6b975deab1950530fdb04/lxml-5.3.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:52d82b0d436edd6a1d22d94a344b9a58abd6c68c357ed44f22d4ba8179b37629" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a5/73/762c319c4906b3db67e4abc7cfe7d66c34996edb6d0e8cb60f462954d662/lxml-5.3.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b6f92e35e2658a5ed51c6634ceb5ddae32053182851d8cad2a5bc102a359b33" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c1/e7/d1e296cb3b3b46371220a31350730948d7bea41cc9123c5fd219dea33c29/lxml-5.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:203b1d3eaebd34277be06a3eb880050f18a4e4d60861efba4fb946e31071a295" },
    { url = "https://mirrors.aliyun.com/pypi/packages/df/90/4adc854475105b93ead6c0c736f762d29371751340dcf5588cfcf8191b8a/lxml-5.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:155e1a5693cf4b55af652f5c0f78ef36596c7f680ff3ec6eb4d7d85367259b2c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f0/0d/39864efbd231c13eb53edee2ab91c742c24d2f93efe2af7d3fe4343e42c1/lxml-5.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22ec2b3c191f43ed21f9545e9df94c37c6b49a5af0a874008ddc9132d49a2d9c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/8d/7a/630a64ceb1088196de182e2e33b5899691c3e1ae21af688e394208bd6810/lxml-5.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7eda194dd46e40ec745bf76795a7cccb02a6a41f445ad49d3cf66518b0bd9cff" },
    { url = "https://mirrors.aliyun.com/pypi/packages/b2/3d/091bc7b592333754cb346c1507ca948ab39bc89d83577ac8f1da3be4dece/lxml-5.3.1-cp311-cp311-win32.whl", hash = "sha256:fb7c61d4be18e930f75948705e9718618862e6fc2ed0d7159b2262be73f167a2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/12/8c/7d47cfc0d04fd4e3639ec7e1c96c2561d5e890eb900de8f76eea75e0964a/lxml-5.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c809eef167bf4a57af4b03007004896f5c60bd38dc3852fcd97a26eae3d4c9e6" },
    { url = "https://mirrors.aliyun.com/pypi/packages/3b/f4/5121aa9ee8e09b8b8a28cf3709552efe3d206ca51a20d6fa471b60bb3447/lxml-5.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e69add9b6b7b08c60d7ff0152c7c9a6c45b4a71a919be5abde6f98f1ea16421c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/0a/ca/8e9aa01edddc74878f4aea85aa9ab64372f46aa804d1c36dda861bf9eabf/lxml-5.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4e52e1b148867b01c05e21837586ee307a01e793b94072d7c7b91d2c2da02ffe" },
    { url = "https://mirrors.aliyun.com/pypi/packages/b2/b3/ea40a5c98619fbd7e9349df7007994506d396b97620ced34e4e5053d3734/lxml-5.3.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4b382e0e636ed54cd278791d93fe2c4f370772743f02bcbe431a160089025c9" },
    { url = "https://mirrors.aliyun.com/pypi/packages/3a/5e/375418be35f8a695cadfe7e7412f16520e62e24952ed93c64c9554755464/lxml-5.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e49dc23a10a1296b04ca9db200c44d3eb32c8d8ec532e8c1fd24792276522a" },
    { url = "https://mirrors.aliyun.com/pypi/packages/79/7c/d258eaaa9560f6664f9b426a5165103015bee6512d8931e17342278bad0a/lxml-5.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4399b4226c4785575fb20998dc571bc48125dc92c367ce2602d0d70e0c455eb0" },
    { url = "https://mirrors.aliyun.com/pypi/packages/03/bc/a041415be4135a1b3fdf017a5d873244cc16689456166fbdec4b27fba153/lxml-5.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5412500e0dc5481b1ee9cf6b38bb3b473f6e411eb62b83dc9b62699c3b7b79f7" },
    { url = "https://mirrors.aliyun.com/pypi/packages/32/88/047f24967d5e3fc97848ea2c207eeef0f16239cdc47368c8b95a8dc93a33/lxml-5.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c93ed3c998ea8472be98fb55aed65b5198740bfceaec07b2eba551e55b7b9ae" },
    { url = "https://mirrors.aliyun.com/pypi/packages/3d/b5/ecf5a20937ecd21af02c5374020f4e3a3538e10a32379a7553fca3d77094/lxml-5.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:63d57fc94eb0bbb4735e45517afc21ef262991d8758a8f2f05dd6e4174944519" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a4/05/56c359e07275911ed5f35ab1d63c8cd3360d395fb91e43927a2ae90b0322/lxml-5.3.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:b450d7cabcd49aa7ab46a3c6aa3ac7e1593600a1a0605ba536ec0f1b99a04322" },
    { url = "https://mirrors.aliyun.com/pypi/packages/b7/f4/f95e3ae12e9f32fbcde00f9affa6b0df07f495117f62dbb796a9a31c84d6/lxml-5.3.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:4df0ec814b50275ad6a99bc82a38b59f90e10e47714ac9871e1b223895825468" },
    { url = "https://mirrors.aliyun.com/pypi/packages/c5/f8/309546aec092434166a6e11c7dcecb5c2d0a787c18c072d61e18da9eba57/lxml-5.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d184f85ad2bb1f261eac55cddfcf62a70dee89982c978e92b9a74a1bfef2e367" },
    { url = "https://mirrors.aliyun.com/pypi/packages/71/1c/b951817cb5058ca7c332d012dfe8bc59dabd0f0a8911ddd7b7ea8e41cfbd/lxml-5.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b725e70d15906d24615201e650d5b0388b08a5187a55f119f25874d0103f90dd" },
    { url = "https://mirrors.aliyun.com/pypi/packages/31/23/45feba8dae1d35fcca1e51b051f59dc4223cbd23e071a31e25f3f73938a8/lxml-5.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a31fa7536ec1fb7155a0cd3a4e3d956c835ad0a43e3610ca32384d01f079ea1c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/61/69/be245d7b2dbef81c542af59c97fcd641fbf45accf2dc1c325bae7d0d014c/lxml-5.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3c3c8b55c7fc7b7e8877b9366568cc73d68b82da7fe33d8b98527b73857a225f" },
    { url = "https://mirrors.aliyun.com/pypi/packages/69/06/128af2ed04bac99b8f83becfb74c480f1aa18407b5c329fad457e08a1bf4/lxml-5.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d61ec60945d694df806a9aec88e8f29a27293c6e424f8ff91c80416e3c617645" },
    { url = "https://mirrors.aliyun.com/pypi/packages/8a/2d/f03a21cf6cc75cdd083563e509c7b6b159d761115c4142abb5481094ed8c/lxml-5.3.1-cp312-cp312-win32.whl", hash = "sha256:f4eac0584cdc3285ef2e74eee1513a6001681fd9753b259e8159421ed28a72e5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/2b/9c/8abe21585d20ef70ad9cec7562da4332b764ed69ec29b7389d23dfabcea0/lxml-5.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:29bfc8d3d88e56ea0a27e7c4897b642706840247f59f4377d81be8f32aa0cfbf" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d2/b4/89a68d05f267f05cc1b8b2f289a8242955705b1b0a9d246198227817ee46/lxml-5.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:afa578b6524ff85fb365f454cf61683771d0170470c48ad9d170c48075f86725" },
    { url = "https://mirrors.aliyun.com/pypi/packages/7f/0d/c034a541e7a1153527d7880c62493a74f2277f38e64de2480cadd0d4cf96/lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f5e80adf0aafc7b5454f2c1cb0cde920c9b1f2cbd0485f07cc1d0497c35c5d" },
    { url = "https://mirrors.aliyun.com/pypi/packages/35/5c/38e183c2802f14fbdaa75c3266e11d0ca05c64d78e8cdab2ee84e954a565/lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd0b80ac2d8f13ffc906123a6f20b459cb50a99222d0da492360512f3e50f84" },
    { url = "https://mirrors.aliyun.com/pypi/packages/18/5b/14f93b359b3c29673d5d282bc3a6edb3a629879854a77541841aba37607f/lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:422c179022ecdedbe58b0e242607198580804253da220e9454ffe848daa1cfd2" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f6/08/8471de65f3dee70a3a50e7082fd7409f0ac7a1ace777c13fca4aea1a5759/lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:524ccfded8989a6595dbdda80d779fb977dbc9a7bc458864fc9a0c2fc15dc877" },
    { url = "https://mirrors.aliyun.com/pypi/packages/83/29/00b9b0322a473aee6cda87473401c9abb19506cd650cc69a8aa38277ea74/lxml-5.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:48fd46bf7155def2e15287c6f2b133a2f78e2d22cdf55647269977b873c65499" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f5/1f/a3b6b74a451ceb84b471caa75c934d2430a4d84395d38ef201d539f38cd1/lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/36/af/a567a55b3e47135b4d1f05a1118c24529104c003f95851374b3748139dc1/lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7" },
    { url = "https://mirrors.aliyun.com/pypi/packages/50/ba/4ee47d24c675932b3eb5b6de77d0f623c2db6dc466e7a1f199792c5e3e3a/lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f2/0f/b4db6dfebfefe3abafe360f42a3d471881687fd449a0b86b70f1f2683438/lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28" },
    { url = "https://mirrors.aliyun.com/pypi/packages/0b/1f/0bb1bae1ce056910f8db81c6aba80fec0e46c98d77c0f59298c70cd362a3/lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609" },
    { url = "https://mirrors.aliyun.com/pypi/packages/21/f5/e7b66a533fc4a1e7fa63dd22a1ab2ec4d10319b909211181e1ab3e539295/lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4" },
    { url = "https://mirrors.aliyun.com/pypi/packages/11/39/a38244b669c2d95a6a101a84d3c85ba921fea827e9e5483e93168bf1ccb2/lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7" },
    { url = "https://mirrors.aliyun.com/pypi/packages/db/64/48cac242347a09a07740d6cee7b7fd4663d5c1abd65f2e3c60420e231b27/lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f" },
    { url = "https://mirrors.aliyun.com/pypi/packages/98/89/97442835fbb01d80b72374f9594fe44f01817d203fa056e9906128a5d896/lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f1/97/164ca398ee654eb21f29c6b582685c6c6b9d62d5213abc9b8380278e9c0a/lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d0/bc/712b96823d7feb53482d2e4f59c090fb18ec7b0d0b476f353b3085893cda/lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d4/55/a62a39e8f9da2a8b6002603475e3c57c870cd9c95fd4b94d4d9ac9036055/lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b" },
    { url = "https://mirrors.aliyun.com/pypi/packages/ea/47/a393728ae001b92bb1a9e095e570bf71ec7f7fbae7688a4792222e56e5b9/lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563" },
    { url = "https://mirrors.aliyun.com/pypi/packages/5e/5f/9dcaaad037c3e642a7ea64b479aa082968de46dd67a8293c541742b6c9db/lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5" },
    { url = "https://mirrors.aliyun.com/pypi/packages/a7/0a/ebcae89edf27e61c45023005171d0ba95cb414ee41c045ae4caf1b8487fd/lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776" },
    { url = "https://mirrors.aliyun.com/pypi/packages/42/ad/cc8140ca99add7d85c92db8b2354638ed6d5cc0e917b21d36039cb15a238/lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7" },
    { url = "https://mirrors.aliyun.com/pypi/packages/e9/39/597ce090da1097d2aabd2f9ef42187a6c9c8546d67c419ce61b88b336c85/lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250" },
    { url = "https://mirrors.aliyun.com/pypi/packages/81/2d/67693cc8a605a12e5975380d7ff83020dcc759351b5a066e1cced04f797b/lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9" },
    { url = "https://mirrors.aliyun.com/pypi/packages/73/53/b5a05ab300a808b72e848efd152fe9c022c0181b0a70b8bca1199f1bed26/lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7" },
    { url = "https://mirrors.aliyun.com/pypi/packages/d8/cb/1a3879c5f512bdcd32995c301886fe082b2edd83c87d41b6d42d89b4ea4d/lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa" },
    { url = "https://mirrors.aliyun.com/pypi/packages/f9/94/bbc66e42559f9d04857071e3b3d0c9abd88579367fd2588a4042f641f57e/lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df" },
    { url = "https://mirrors.aliyun.com/pypi/packages/66/95/34b0679bee435da2d7cae895731700e519a8dfcab499c21662ebe671603e/lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e" },
    { url = "https://mirrors.aliyun.com/pypi/packages/e0/5d/abfcc6ab2fa0be72b2ba938abdae1f7cad4c632f8d552683ea295d55adfb/lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44" },
{ url = "https://mirrors.aliyun.com/pypi/packages/66/95/34b0679bee435da2d7cae895731700e519a8dfcab499c21662ebe671603e/lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e0/5d/abfcc6ab2fa0be72b2ba938abdae1f7cad4c632f8d552683ea295d55adfb/lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/5a/78/6bd33186c8863b36e084f294fc0a5e5eefe77af95f0663ef33809cc1c8aa/lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/3b/74/4d7ad4839bd0fc64e3d12da74fc9a193febb0fae0ba6ebd5149d4c23176a/lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/24/0d/0a98ed1f2471911dadfc541003ac6dd6879fc87b15e1143743ca20f3e973/lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/48/de/d4f7e4c39740a6610f0f6959052b547478107967362e8424e1163ec37ae8/lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/07/8c/61763abd242af84f355ca4ef1ee096d3c1b7514819564cce70fd18c22e9a/lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/f9/c5/6d7e3b63e7e282619193961a570c0a4c8a57fe820f07ca3fe2f6bd86608a/lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/71/4a/e60a306df54680b103348545706a98a7514a42c8b4fbfdcaa608567bb065/lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/27/f2/9754aacd6016c930875854f08ac4b192a47fe19565f776a64004aa167521/lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/38/a2/0c49ec6941428b1bd4f280650d7b11a0f91ace9db7de32eb7aa23bcb39ff/lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/7a/75/87a3963a08eafc46a86c1131c6e28a4de103ba30b5ae903114177352a3d7/lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/fa/f9/1f0964c4f6c2be861c50db380c554fb8befbea98c6404744ce243a3c87ef/lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c6/b0/e4d1cbb8c078bc4ae44de9c6a79fec4e2b4151b1b4d50af71d799e76b177/lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/5b/aa/e2bdefba40d815059bcb60b371a36fbfcce970a935370e1b367ba1cc8f74/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/3c/5f/91ff89d1e092e7cfdd8453a939436ac116db0a665e7f4be0cd8e65c7dc5a/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/be/7c/8c3f15df2ca534589717bfd19d1e3482167801caedfa4d90a575facf68a6/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/7d/d8/9567afb1665f64d73fc54eb904e418d1138d7f011ed00647121b4dd60b38/lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/f1/ab/fdbbd91d8d82bf1a723ba88ec3e3d76c022b53c391b0c13cad441cdb8f9e/lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -200,59 +224,66 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "24.2"
|
||||
version = "25.0"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pillow"
|
||||
version = "11.1.0"
|
||||
version = "11.2.1"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/f3/af/c097e544e7bd278333db77933e535098c259609c4eb3b85381109602fb5b/pillow-11.1.0.tar.gz", hash = "sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/af/cb/bb5c01fcd2a69335b86c22142b2bccfc3464087efb7fd382eee5ffc7fdf7/pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/50/1c/2dcea34ac3d7bc96a1fd1bd0a6e06a57c67167fec2cff8d95d88229a8817/pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/14/ca/6bec3df25e4c88432681de94a3531cc738bd85dea6c7aa6ab6f81ad8bd11/pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/d4/2c/668e18e5521e46eb9667b09e501d8e07049eb5bfe39d56be0724a43117e6/pillow-11.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07dba04c5e22824816b2615ad7a7484432d7f540e6fa86af60d2de57b0fcee2" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/02/80/79f99b714f0fc25f6a8499ecfd1f810df12aec170ea1e32a4f75746051ce/pillow-11.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e267b0ed063341f3e60acd25c05200df4193e15a4a5807075cd71225a2386e26" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/81/aa/8d4ad25dc11fd10a2001d5b8a80fdc0e564ac33b293bdfe04ed387e0fd95/pillow-11.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bd165131fd51697e22421d0e467997ad31621b74bfc0b75956608cb2906dda07" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/84/7a/cd0c3eaf4a28cb2a74bdd19129f7726277a7f30c4f8424cd27a62987d864/pillow-11.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:abc56501c3fd148d60659aae0af6ddc149660469082859fa7b066a298bde9482" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/8f/8b/a907fdd3ae8f01c7670dfb1499c53c28e217c338b47a813af8d815e7ce97/pillow-11.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:54ce1c9a16a9561b6d6d8cb30089ab1e5eb66918cb47d457bd996ef34182922e" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/6f/9a/9f139d9e8cccd661c3efbf6898967a9a337eb2e9be2b454ba0a09533100d/pillow-11.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:73ddde795ee9b06257dac5ad42fcb07f3b9b813f8c1f7f870f402f4dc54b5269" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/a8/68/0d8d461f42a3f37432203c8e6df94da10ac8081b6d35af1c203bf3111088/pillow-11.1.0-cp310-cp310-win32.whl", hash = "sha256:3a5fe20a7b66e8135d7fd617b13272626a28278d0e578c98720d9ba4b2439d49" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/14/81/d0dff759a74ba87715509af9f6cb21fa21d93b02b3316ed43bda83664db9/pillow-11.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6123aa4a59d75f06e9dd3dac5bf8bc9aa383121bb3dd9a7a612e05eabc9961a" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/ce/1f/8d50c096a1d58ef0584ddc37e6f602828515219e9d2428e14ce50f5ecad1/pillow-11.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:a76da0a31da6fcae4210aa94fd779c65c75786bc9af06289cd1c184451ef7a65" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/dd/d6/2000bfd8d5414fb70cbbe52c8332f2283ff30ed66a9cde42716c8ecbe22c/pillow-11.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/d9/45/3fe487010dd9ce0a06adf9b8ff4f273cc0a44536e234b0fad3532a42c15b/pillow-11.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e3/72/776b3629c47d9d5f1c160113158a7a7ad177688d3a1159cd3b62ded5a33a/pillow-11.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e4/c2/e25199e7e4e71d64eeb869f5b72c7ddec70e0a87926398785ab944d92375/pillow-11.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c1/ed/51d6136c9d5911f78632b1b86c45241c712c5a80ed7fa7f9120a5dff1eba/pillow-11.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/48/a4/fbfe9d5581d7b111b28f1d8c2762dee92e9821bb209af9fa83c940e507a0/pillow-11.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/39/db/0b3c1a5018117f3c1d4df671fb8e47d08937f27519e8614bbe86153b65a5/pillow-11.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/d9/58/bc128da7fea8c89fc85e09f773c4901e95b5936000e6f303222490c052f3/pillow-11.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/5f/bb/58f34379bde9fe197f51841c5bbe8830c28bbb6d3801f16a83b8f2ad37df/pillow-11.1.0-cp311-cp311-win32.whl", hash = "sha256:c12fc111ef090845de2bb15009372175d76ac99969bdf31e2ce9b42e4b8cd88f" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/3a/c6/fce9255272bcf0c39e15abd2f8fd8429a954cf344469eaceb9d0d1366913/pillow-11.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd43429d0d7ed6533b25fc993861b8fd512c42d04514a0dd6337fb3ccf22761" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c8/52/8ba066d569d932365509054859f74f2a9abee273edcef5cd75e4bc3e831e/pillow-11.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f7955ecf5609dee9442cbface754f2c6e541d9e6eda87fad7f7a989b0bdb9d71" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/95/20/9ce6ed62c91c073fcaa23d216e68289e19d95fb8188b9fb7a63d36771db8/pillow-11.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/b9/d8/f6004d98579a2596c098d1e30d10b248798cceff82d2b77aa914875bfea1/pillow-11.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/08/d9/892e705f90051c7a2574d9f24579c9e100c828700d78a63239676f960b74/pillow-11.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/8c/aa/7f29711f26680eab0bcd3ecdd6d23ed6bce180d82e3f6380fb7ae35fcf3b/pillow-11.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c8/c4/8f0fe3b9e0f7196f6d0bbb151f9fba323d72a41da068610c4c960b16632a/pillow-11.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/38/0d/84200ed6a871ce386ddc82904bfadc0c6b28b0c0ec78176871a4679e40b3/pillow-11.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/84/9c/9bcd66f714d7e25b64118e3952d52841a4babc6d97b6d28e2261c52045d4/pillow-11.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/db/61/ada2a226e22da011b45f7104c95ebda1b63dcbb0c378ad0f7c2a710f8fd2/pillow-11.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e7/c4/fc6e86750523f367923522014b821c11ebc5ad402e659d8c9d09b3c9d70c/pillow-11.1.0-cp312-cp312-win32.whl", hash = "sha256:cfd5cd998c2e36a862d0e27b2df63237e67273f2fc78f47445b14e73a810e7e6" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/08/5c/2104299949b9d504baf3f4d35f73dbd14ef31bbd1ddc2c1b66a5b7dfda44/pillow-11.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a697cd8ba0383bba3d2d3ada02b34ed268cb548b369943cd349007730c92bddf" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/37/f3/9b18362206b244167c958984b57c7f70a0289bfb59a530dd8af5f699b910/pillow-11.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:4dd43a78897793f60766563969442020e90eb7847463eca901e41ba186a7d4a5" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/fa/c5/389961578fb677b8b3244fcd934f720ed25a148b9a5cc81c91bdf59d8588/pillow-11.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8c730dc3a83e5ac137fbc92dfcfe1511ce3b2b5d7578315b63dbbb76f7f51d90" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c4/fa/803c0e50ffee74d4b965229e816af55276eac1d5806712de86f9371858fd/pillow-11.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d33d2fae0e8b170b6a6c57400e077412240f6f5bb2a342cf1ee512a787942bb" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/dc/67/2a3a5f8012b5d8c63fe53958ba906c1b1d0482ebed5618057ef4d22f8076/pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8d65b38173085f24bc07f8b6c505cbb7418009fa1a1fcb111b1f4961814a442" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e5/a0/514f0d317446c98c478d1872497eb92e7cde67003fed74f696441e647446/pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:015c6e863faa4779251436db398ae75051469f7c903b043a48f078e437656f83" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/cd/00/20f40a935514037b7d3f87adfc87d2c538430ea625b63b3af8c3f5578e72/pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d44ff19eea13ae4acdaaab0179fa68c0c6f2f45d66a4d8ec1eda7d6cecbcc15f" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/28/3c/7de681727963043e093c72e6c3348411b0185eab3263100d4490234ba2f6/pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3d8da4a631471dfaf94c10c85f5277b1f8e42ac42bade1ac67da4b4a7359b73" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/41/67/936f9814bdd74b2dfd4822f1f7725ab5d8ff4103919a1664eb4874c58b2f/pillow-11.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/0d/8b/b158ad57ed44d3cc54db8d68ad7c0a58b8fc0e4c7a3f995f9d62d5b464a1/pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/b1/f8/bb5d956142f86c2d6cc36704943fa761f2d2e4c48b7436fd0a85c20f1713/pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/22/7f/0e413bb3e2aa797b9ca2c5c38cb2e2e45d88654e5b12da91ad446964cfae/pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/f3/b4/cc647f4d13f3eb837d3065824aa58b9bcf10821f029dc79955ee43f793bd/pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c2/6f/240b772a3b35cdd7384166461567aa6713799b4e78d180c555bd284844ea/pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/f3/5e/7ca9c815ade5fdca18853db86d812f2f188212792780208bdb37a0a6aef4/pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/02/81/c3d9d38ce0c4878a77245d4cf2c46d45a4ad0f93000227910a46caff52f3/pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/42/49/52b719b89ac7da3185b8d29c94d0e6aec8140059e3d8adcaa46da3751180/pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/5b/0b/ede75063ba6023798267023dc0d0401f13695d228194d2242d5a7ba2f964/pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/ed/3c/9831da3edea527c2ed9a09f31a2c04e77cd705847f13b69ca60269eec370/pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/01/97/1f66ff8a1503d8cbfc5bae4dc99d54c6ec1e22ad2b946241365320caabc2/pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/68/08/3fbf4b98924c73037a8e8b4c2c774784805e0fb4ebca6c5bb60795c40125/pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/84/92/6505b1af3d2849d5e714fc75ba9e69b7255c05ee42383a35a4d58f576b16/pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/3c/8c/ac2f99d2a70ff966bc7eb13dacacfaab57c0549b2ffb351b6537c7840b12/pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/1f/e3/0a58b5d838687f40891fff9cbaf8669f90c96b64dc8f91f87894413856c6/pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/21/f5/6ba14718135f08fbfa33308efe027dd02b781d3f1d5c471444a395933aac/pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/20/f2/805ad600fc59ebe4f1ba6129cd3a75fb0da126975c8579b8f57abeb61e80/pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/71/6b/4ef8a288b4bb2e0180cba13ca0a519fa27aa982875882392b65131401099/pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/62/ae/f29c705a09cbc9e2a456590816e5c234382ae5d32584f451c3eb41a62062/pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/6e/1a/c8217b6f2f73794a5e219fbad087701f412337ae6dbb956db37d69a9bc43/pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e2/72/25a8f40170dc262e86e90f37cb72cb3de5e307f75bf4b02535a61afcd519/pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/06/9e/76825e39efee61efea258b479391ca77d64dbd9e5804e4ad0fa453b4ba55/pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c7/40/052610b15a1b8961f52537cc8326ca6a881408bc2bdad0d852edeb6ed33b/pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e5/7e/b86dbd35a5f938632093dc40d1682874c33dcfe832558fc80ca56bfcb774/pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/a4/5c/467a161f9ed53e5eab51a42923c33051bf8d1a2af4626ac04f5166e58e0c/pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/62/73/972b7742e38ae0e2ac76ab137ca6005dcf877480da0d9d61d93b613065b4/pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e4/3a/427e4cb0b9e177efbc1a84798ed20498c4f233abde003c06d2650a6d60cb/pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/fe/7c/d8b1330458e4d2f3f45d9508796d7caf0c0d3764c00c823d10f6f1a3b76d/pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/b3/2f/65738384e0b1acf451de5a573d8153fe84103772d139e1e0bdf1596be2ea/pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/6a/c5/e795c9f2ddf3debb2dedd0df889f2fe4b053308bb59a3cc02a0cd144d641/pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/96/ae/ca0099a3995976a9fce2f423166f7bff9b12244afdc7520f6ed38911539a/pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/7c/18/24bff2ad716257fc03da964c5e8f05d9790a779a8895d6566e493ccf0189/pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/da/bb/e8d656c9543276517ee40184aaa39dcb41e683bca121022f9323ae11b39d/pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/33/49/c8c21e4255b4f4a2c0c68ac18125d7f5460b109acc6dfdef1a24f9b960ef/pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/6d/f1/f7255c0838f8c1ef6d55b625cfb286835c17e8136ce4351c5577d02c443b/pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/e2/57/9968114457bd131063da98d87790d080366218f64fa2943b65ac6739abb3/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/b2/1b/e35d8a158e21372ecc48aac9c453518cfe23907bb82f950d6e1c72811eb0/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/26/da/2c11d03b765efff0ccc473f1c4186dc2770110464f2177efaed9cf6fae01/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/79/1a/4e85bd7cadf78412c2a3069249a09c32ef3323650fd3005c97cca7aa21df/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/69/03/239939915216de1e95e0ce2334bf17a7870ae185eb390fab6d706aadbfc0/pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/a4/ad/2613c04633c7257d9481ab21d6b5364b59fc5d75faafd7cb8693523945a3/pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/a4/fd/dcdda4471ed667de57bb5405bb42d751e6cfdd4011a12c248b455c778e03/pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/ac/89/8a2536e95e77432833f0db6fd72a8d310c8e4272a04461fb833eb021bf94/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/9d/8f/abd47b73c60712f88e9eda32baced7bfc3e9bd6a7619bb64b93acff28c3e/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/f6/20/5c0a0aa83b213b7a07ec01e71a3d6ea2cf4ad1d2c686cc0168173b6089e7/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/58/0e/2abab98a72202d91146abc839e10c14f7cf36166f12838ea0c4db3ca6ecb/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/21/2c/5e05f58658cf49b6667762cca03d6e7d85cededde2caf2ab37b81f80e574/pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -315,6 +346,12 @@ version = "0.18.0"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "beartype" },
|
||||
{ name = "requests" },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
test = [
|
||||
{ name = "hypothesis" },
|
||||
{ name = "openpyxl" },
|
||||
{ name = "pillow" },
|
||||
{ name = "pytest" },
|
||||
@ -325,36 +362,36 @@ dependencies = [
|
||||
{ name = "requests-toolbelt" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
test = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "beartype", specifier = ">=0.18.5,<0.19.0" },
|
||||
{ name = "requests", specifier = ">=2.30.0,<3.0.0" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
test = [
|
||||
{ name = "hypothesis", specifier = ">=6.131.9" },
|
||||
{ name = "openpyxl", specifier = ">=3.1.5" },
|
||||
{ name = "pillow", specifier = ">=11.1.0" },
|
||||
{ name = "pytest", specifier = ">=8.0.0,<9.0.0" },
|
||||
{ name = "pytest", marker = "extra == 'test'", specifier = ">=8.0.0,<9.0.0" },
|
||||
{ name = "pytest", specifier = ">=8.3.5" },
|
||||
{ name = "python-docx", specifier = ">=1.1.2" },
|
||||
{ name = "python-pptx", specifier = ">=1.0.2" },
|
||||
{ name = "reportlab", specifier = ">=4.3.1" },
|
||||
{ name = "requests", specifier = ">=2.30.0,<3.0.0" },
|
||||
{ name = "requests", specifier = ">=2.32.3" },
|
||||
{ name = "requests-toolbelt", specifier = ">=1.0.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "reportlab"
|
||||
version = "4.3.1"
|
||||
version = "4.4.0"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
dependencies = [
|
||||
{ name = "chardet" },
|
||||
{ name = "pillow" },
|
||||
]
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/a7/5c/9b23c8a9a69f2bc1f1268ed545f393a60b59cbe5f9d861a28b676f809729/reportlab-4.3.1.tar.gz", hash = "sha256:230f78b21667194d8490ac9d12958d5c14686352db7fbe03b95140fafdf5aa97" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/88/69/291a56d8bb177017e6e5421c34baa51b2e9017434c0ca1822e5007e45a26/reportlab-4.4.0.tar.gz", hash = "sha256:a64d85513910e246c21dc97ccc3c9054a1d44370bf8fc1fab80af937814354d5" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/ce/6b/42805895ed08a314a01be6110584b5d059328386988ddbc4f8f10014d30e/reportlab-4.3.1-py3-none-any.whl", hash = "sha256:0f37dd16652db3ef84363cf744632a28c38bd480d5bf94683466852d7bb678dd" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/52/15/4702e132ae36beb8daf3e20a92f166451148c4a89650cc9d3f19b3c66714/reportlab-4.4.0-py3-none-any.whl", hash = "sha256:0a993f1d4a765fcbdf4e26adc96b3351004ebf4d27583340595ba7edafebec32" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -384,6 +421,15 @@ wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sortedcontainers"
|
||||
version = "2.4.0"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tomli"
|
||||
version = "2.2.1"
|
||||
@ -415,27 +461,27 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.12.2"
|
||||
version = "4.13.2"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.3.0"
|
||||
version = "2.4.0"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xlsxwriter"
|
||||
version = "3.2.2"
|
||||
version = "3.2.3"
|
||||
source = { registry = "https://mirrors.aliyun.com/pypi/simple" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/a1/08/26f69d1e9264e8107253018de9fc6b96f9219817d01c5f021e927384a8d1/xlsxwriter-3.2.2.tar.gz", hash = "sha256:befc7f92578a85fed261639fb6cde1fd51b79c5e854040847dde59d4317077dc" }
|
||||
sdist = { url = "https://mirrors.aliyun.com/pypi/packages/a7/d1/e026d33dd5d552e5bf3a873dee54dad66b550230df8290d79394f09b2315/xlsxwriter-3.2.3.tar.gz", hash = "sha256:ad6fd41bdcf1b885876b1f6b7087560aecc9ae5a9cc2ba97dcac7ab2e210d3d5" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/9b/07/df054f7413bdfff5e98f75056e4ed0977d0c8716424011fac2587864d1d3/XlsxWriter-3.2.2-py3-none-any.whl", hash = "sha256:272ce861e7fa5e82a4a6ebc24511f2cb952fde3461f6c6e1a1e81d3272db1471" },
|
||||
{ url = "https://mirrors.aliyun.com/pypi/packages/37/b1/a252d499f2760b314fcf264d2b36fcc4343a1ecdb25492b210cb0db70a68/XlsxWriter-3.2.3-py3-none-any.whl", hash = "sha256:593f8296e8a91790c6d0378ab08b064f34a642b3feb787cf6738236bd0a4860d" },
|
||||
]