Mirror of https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git (synced 2025-08-18 05:25:52 +08:00)
Test: Skip unstable 'stop parse documents' test cases (#6310)

### What problem does this PR solve?

Skip unstable 'stop parse documents' test cases.

### Type of change

- [x] update test cases
parent 9611185eb4
commit 4eb3a8e1cc
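The functional change is a single added line: the whole `TestDocumentsParseStop` class is disabled with pytest's skip marker (the remaining hunks only reformat long calls across multiple lines). For reference, a class-level skip looks like the minimal sketch below; the `reason` argument is optional and purely illustrative here, since this PR applies a bare `@pytest.mark.skip`:

```python
import pytest

# A bare @pytest.mark.skip (as in this PR) also works; adding a reason
# string makes the skip show up with an explanation in the test report.
@pytest.mark.skip(reason="unstable 'stop parse documents' cases")
class TestDocumentsParseStop:
    def test_basic_scenarios(self):
        ...  # never runs; pytest reports every test in the class as skipped
```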
```diff
@@ -60,13 +60,16 @@ class TestAuthorization:
             ),
         ],
     )
-    def test_invalid_auth(self, get_http_api_auth, auth, expected_code, expected_message):
+    def test_invalid_auth(
+        self, get_http_api_auth, auth, expected_code, expected_message
+    ):
         ids = create_datasets(get_http_api_auth, 1)
         res = stop_parse_documnet(auth, ids[0])
         assert res["code"] == expected_code
         assert res["message"] == expected_message
 
 
+@pytest.mark.skip
 class TestDocumentsParseStop:
     @pytest.mark.parametrize(
         "payload, expected_code, expected_message",
@@ -98,7 +101,9 @@ class TestDocumentsParseStop:
             (lambda r: {"document_ids": r}, 0, ""),
         ],
     )
-    def test_basic_scenarios(self, get_http_api_auth, tmp_path, payload, expected_code, expected_message):
+    def test_basic_scenarios(
+        self, get_http_api_auth, tmp_path, payload, expected_code, expected_message
+    ):
         @wait_for(10, 1, "Document parsing timeout")
         def condition(_auth, _dataset_id, _document_ids):
             for _document_id in _document_ids:
@@ -109,7 +114,9 @@ class TestDocumentsParseStop:
 
         ids = create_datasets(get_http_api_auth, 1)
         dataset_id = ids[0]
-        document_ids = batch_upload_documents(get_http_api_auth, dataset_id, 3, tmp_path)
+        document_ids = batch_upload_documents(
+            get_http_api_auth, dataset_id, 3, tmp_path
+        )
         parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
 
         if callable(payload):
@@ -120,10 +127,16 @@ class TestDocumentsParseStop:
         if expected_code != 0:
             assert res["message"] == expected_message
         else:
-            completed_document_ids = list(set(document_ids) - set(payload["document_ids"]))
+            completed_document_ids = list(
+                set(document_ids) - set(payload["document_ids"])
+            )
             condition(get_http_api_auth, dataset_id, completed_document_ids)
-            validate_document_parse_cancel(get_http_api_auth, dataset_id, payload["document_ids"])
-            validate_document_parse_done(get_http_api_auth, dataset_id, completed_document_ids)
+            validate_document_parse_cancel(
+                get_http_api_auth, dataset_id, payload["document_ids"]
+            )
+            validate_document_parse_done(
+                get_http_api_auth, dataset_id, completed_document_ids
+            )
 
     @pytest.mark.parametrize(
         "dataset_id, expected_code, expected_message",
@@ -146,7 +159,9 @@ class TestDocumentsParseStop:
     ):
         ids = create_datasets(get_http_api_auth, 1)
         document_ids = batch_upload_documents(get_http_api_auth, ids[0], 1, tmp_path)
-        res = stop_parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
+        res = stop_parse_documnet(
+            get_http_api_auth, dataset_id, {"document_ids": document_ids}
+        )
         assert res["code"] == expected_code
         assert res["message"] == expected_message
 
@@ -159,10 +174,14 @@ class TestDocumentsParseStop:
             lambda r: {"document_ids": r + ["invalid_id"]},
         ],
     )
-    def test_stop_parse_partial_invalid_document_id(self, get_http_api_auth, tmp_path, payload):
+    def test_stop_parse_partial_invalid_document_id(
+        self, get_http_api_auth, tmp_path, payload
+    ):
         ids = create_datasets(get_http_api_auth, 1)
         dataset_id = ids[0]
-        document_ids = batch_upload_documents(get_http_api_auth, dataset_id, 3, tmp_path)
+        document_ids = batch_upload_documents(
+            get_http_api_auth, dataset_id, 3, tmp_path
+        )
         parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
 
         if callable(payload):
@@ -175,12 +194,18 @@ class TestDocumentsParseStop:
     def test_repeated_stop_parse(self, get_http_api_auth, tmp_path):
         ids = create_datasets(get_http_api_auth, 1)
         dataset_id = ids[0]
-        document_ids = batch_upload_documents(get_http_api_auth, dataset_id, 1, tmp_path)
+        document_ids = batch_upload_documents(
+            get_http_api_auth, dataset_id, 1, tmp_path
+        )
         parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
-        res = stop_parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
+        res = stop_parse_documnet(
+            get_http_api_auth, dataset_id, {"document_ids": document_ids}
+        )
         assert res["code"] == 0
 
-        res = stop_parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
+        res = stop_parse_documnet(
+            get_http_api_auth, dataset_id, {"document_ids": document_ids}
+        )
         assert res["code"] == 102
         assert res["message"] == "Can't stop parsing document with progress at 0 or 1"
 
@@ -188,9 +213,13 @@ class TestDocumentsParseStop:
    def test_duplicate_stop_parse(self, get_http_api_auth, tmp_path):
         ids = create_datasets(get_http_api_auth, 1)
         dataset_id = ids[0]
-        document_ids = batch_upload_documents(get_http_api_auth, dataset_id, 1, tmp_path)
+        document_ids = batch_upload_documents(
+            get_http_api_auth, dataset_id, 1, tmp_path
+        )
         parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
-        res = stop_parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids + document_ids})
+        res = stop_parse_documnet(
+            get_http_api_auth, dataset_id, {"document_ids": document_ids + document_ids}
+        )
         assert res["code"] == 0
         assert res["success_count"] == 1
         assert f"Duplicate document ids: {document_ids[0]}" in res["data"]["errors"]
@@ -200,9 +229,13 @@ class TestDocumentsParseStop:
         document_num = 100
         ids = create_datasets(get_http_api_auth, 1)
         dataset_id = ids[0]
-        document_ids = batch_upload_documents(get_http_api_auth, dataset_id, document_num, tmp_path)
+        document_ids = batch_upload_documents(
+            get_http_api_auth, dataset_id, document_num, tmp_path
+        )
         parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
-        res = stop_parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
+        res = stop_parse_documnet(
+            get_http_api_auth, dataset_id, {"document_ids": document_ids}
+        )
         assert res["code"] == 0
         validate_document_parse_cancel(get_http_api_auth, dataset_id, document_ids)
 
@@ -211,7 +244,9 @@ class TestDocumentsParseStop:
         document_num = 50
         ids = create_datasets(get_http_api_auth, 1)
         dataset_id = ids[0]
-        document_ids = batch_upload_documents(get_http_api_auth, dataset_id, document_num, tmp_path)
+        document_ids = batch_upload_documents(
+            get_http_api_auth, dataset_id, document_num, tmp_path
+        )
         parse_documnet(get_http_api_auth, dataset_id, {"document_ids": document_ids})
 
         with ThreadPoolExecutor(max_workers=5) as executor:
```
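The skipped tests poll for parsing state through a `wait_for(timeout, interval, error_message)` decorator whose implementation lives in the shared test utilities, not in this diff. A minimal sketch of such a polling decorator, assuming the wrapped predicate returns a truthy value once the documents reach the expected state, might look like:

```python
import functools
import time

def wait_for(timeout, interval, error_message):
    """Poll the wrapped predicate every `interval` seconds until it
    returns a truthy value, or raise once `timeout` seconds elapse."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            deadline = time.monotonic() + timeout
            while time.monotonic() < deadline:
                if func(*args, **kwargs):
                    return True
                time.sleep(interval)
            raise TimeoutError(error_message)
        return wrapper
    return decorator
```

A fixed budget like `wait_for(10, 1, ...)` races the parser's actual progress, which fits the PR's stated motivation for skipping these cases as unstable.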
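The final hunk ends at the `with ThreadPoolExecutor(max_workers=5) as executor:` context line; the body of that block is unchanged and falls outside the hunk. As a hypothetical sketch only, a concurrent stop-parse test of this shape (reusing the helper names from the diff) could be structured like:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def stop_documents_concurrently(auth, dataset_id, document_ids):
    # stop_parse_documnet comes from the project's HTTP API test helpers
    # (assumed import); the pool fires one stop request per document.
    with ThreadPoolExecutor(max_workers=5) as executor:
        futures = [
            executor.submit(
                stop_parse_documnet, auth, dataset_id, {"document_ids": [doc_id]}
            )
            for doc_id in document_ids
        ]
        responses = [future.result() for future in as_completed(futures)]
    # Every concurrent stop request should report success.
    assert all(res["code"] == 0 for res in responses)
```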