Mirror of https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git (synced 2025-07-31 23:52:00 +08:00)
Test: Add list chunk checkpoint for the add chunk API (#6482)
### What problem does this PR solve?

Add list chunk checkpoint for the add chunk API.

### Type of change

- [x] Update test cases
Commit 27989eb9a5 · parent 05997e8215
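The checkpoint added throughout `TestAddChunk` is: read the document's `chunk_count` via `list_chunks` before the call under test, call `add_chunk`, then list again and assert the expected delta. Below is a minimal sketch of that pattern, using only the helper signatures and the response shape visible in the diff; the function name and literal content are illustrative, not part of the PR.

```python
# Minimal sketch of the "list chunk checkpoint" pattern this PR adds.
# add_chunk/list_chunks come from the suite's common helpers; the response shape
# res["data"]["doc"]["chunk_count"] is the one asserted in the diff below.
from common import add_chunk, list_chunks


def add_one_chunk_with_checkpoint(auth, dataset_id, document_id):
    before = list_chunks(auth, dataset_id, document_id)["data"]["doc"]["chunk_count"]

    res = add_chunk(auth, dataset_id, document_id, {"content": "checkpoint demo"})
    assert res["code"] == 0

    after = list_chunks(auth, dataset_id, document_id)["data"]["doc"]["chunk_count"]
    assert after == before + 1  # adding one chunk should bump chunk_count by exactly one
```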
@@ -191,8 +191,8 @@ def list_chunks(auth, dataset_id, document_id, params=None):
 def batch_add_chunks(auth, dataset_id, document_id, num):
-    ids = []
+    chunk_ids = []
     for i in range(num):
         res = add_chunk(auth, dataset_id, document_id, {"content": f"ragflow test {i}"})
-        ids.append(res["data"]["chunk"]["id"])
+        chunk_ids.append(res["data"]["chunk"]["id"])
-    return ids
+    return chunk_ids

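A short usage sketch of the renamed helper follows; it is illustrative only and assumes `batch_add_chunks` lives in the same `common` helpers module as `list_chunks`.

```python
# Illustrative use of batch_add_chunks: add `num` chunks, then cross-check the
# returned chunk IDs against the document's chunk_count (import location assumed).
from common import batch_add_chunks, list_chunks


def demo_batch_add(auth, dataset_id, document_id, num=5):
    before = list_chunks(auth, dataset_id, document_id)["data"]["doc"]["chunk_count"]
    chunk_ids = batch_add_chunks(auth, dataset_id, document_id, num)
    assert len(chunk_ids) == num  # one ID returned per added chunk
    after = list_chunks(auth, dataset_id, document_id)["data"]["doc"]["chunk_count"]
    assert after == before + num
```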
@@ -16,7 +16,7 @@
 from concurrent.futures import ThreadPoolExecutor

 import pytest
-from common import INVALID_API_TOKEN, add_chunk, delete_documnet
+from common import INVALID_API_TOKEN, add_chunk, delete_documnet, list_chunks
 from libs.auth import RAGFlowHttpApiAuth

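The import now also pulls in `list_chunks`, whose signature appears in the first hunk's header (`def list_chunks(auth, dataset_id, document_id, params=None)`). A hedged sketch of what a helper with that signature might look like; the base URL and endpoint path are assumptions based on RAGFlow's documented HTTP API, not taken from this diff.

```python
# Hypothetical list_chunks helper matching the signature shown in the first hunk.
# HOST_ADDRESS and the endpoint path are assumptions, not part of this PR.
import requests

HOST_ADDRESS = "http://127.0.0.1:9380"  # assumed test-server address


def list_chunks(auth, dataset_id, document_id, params=None):
    url = f"{HOST_ADDRESS}/api/v1/datasets/{dataset_id}/documents/{document_id}/chunks"
    res = requests.get(url, auth=auth, params=params)
    return res.json()
```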
@@ -69,10 +69,14 @@ class TestAddChunk:
     )
     def test_content(self, get_http_api_auth, get_dataset_id_and_document_id, payload, expected_code, expected_message):
         dataset_id, document_id = get_dataset_id_and_document_id
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        chunks_count = res["data"]["doc"]["chunk_count"]
         res = add_chunk(get_http_api_auth, dataset_id, document_id, payload)
         assert res["code"] == expected_code
         if expected_code == 0:
             validate_chunk_details(dataset_id, document_id, payload, res)
+            res = list_chunks(get_http_api_auth, dataset_id, document_id)
+            assert res["data"]["doc"]["chunk_count"] == chunks_count + 1
         else:
             assert res["message"] == expected_message

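The same before/after checkpoint is repeated verbatim in `test_important_keywords` and `test_questions` below. A hypothetical refactor, not part of this PR, could factor it into a small context manager:

```python
# Hypothetical helper (not in this PR): wraps the repeated chunk_count checkpoint.
from contextlib import contextmanager

from common import list_chunks


@contextmanager
def expect_chunk_count_delta(auth, dataset_id, document_id, delta):
    before = list_chunks(auth, dataset_id, document_id)["data"]["doc"]["chunk_count"]
    yield
    after = list_chunks(auth, dataset_id, document_id)["data"]["doc"]["chunk_count"]
    assert after == before + delta


# Usage inside a test would then read:
#     with expect_chunk_count_delta(get_http_api_auth, dataset_id, document_id, delta=1):
#         res = add_chunk(get_http_api_auth, dataset_id, document_id, payload)
#         assert res["code"] == 0
```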
@@ -93,10 +97,14 @@ class TestAddChunk:
     )
     def test_important_keywords(self, get_http_api_auth, get_dataset_id_and_document_id, payload, expected_code, expected_message):
         dataset_id, document_id = get_dataset_id_and_document_id
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        chunks_count = res["data"]["doc"]["chunk_count"]
         res = add_chunk(get_http_api_auth, dataset_id, document_id, payload)
         assert res["code"] == expected_code
         if expected_code == 0:
             validate_chunk_details(dataset_id, document_id, payload, res)
+            res = list_chunks(get_http_api_auth, dataset_id, document_id)
+            assert res["data"]["doc"]["chunk_count"] == chunks_count + 1
         else:
             assert res["message"] == expected_message

@@ -118,10 +126,14 @@ class TestAddChunk:
     )
     def test_questions(self, get_http_api_auth, get_dataset_id_and_document_id, payload, expected_code, expected_message):
         dataset_id, document_id = get_dataset_id_and_document_id
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        chunks_count = res["data"]["doc"]["chunk_count"]
         res = add_chunk(get_http_api_auth, dataset_id, document_id, payload)
         assert res["code"] == expected_code
         if expected_code == 0:
             validate_chunk_details(dataset_id, document_id, payload, res)
+            res = list_chunks(get_http_api_auth, dataset_id, document_id)
+            assert res["data"]["doc"]["chunk_count"] == chunks_count + 1
         else:
             assert res["message"] == expected_message

@@ -169,13 +181,19 @@ class TestAddChunk:
     def test_repeated_add_chunk(self, get_http_api_auth, get_dataset_id_and_document_id):
         payload = {"content": "a"}
         dataset_id, document_id = get_dataset_id_and_document_id
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        chunks_count = res["data"]["doc"]["chunk_count"]
         res = add_chunk(get_http_api_auth, dataset_id, document_id, payload)
         assert res["code"] == 0
         validate_chunk_details(dataset_id, document_id, payload, res)
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        assert res["data"]["doc"]["chunk_count"] == chunks_count + 1

         res = add_chunk(get_http_api_auth, dataset_id, document_id, payload)
         assert res["code"] == 0
         validate_chunk_details(dataset_id, document_id, payload, res)
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        assert res["data"]["doc"]["chunk_count"] == chunks_count + 2

     def test_add_chunk_to_deleted_document(self, get_http_api_auth, get_dataset_id_and_document_id):
         dataset_id, document_id = get_dataset_id_and_document_id

@@ -188,6 +206,8 @@ class TestAddChunk:
     def test_concurrent_add_chunk(self, get_http_api_auth, get_dataset_id_and_document_id):
         chunk_num = 50
         dataset_id, document_id = get_dataset_id_and_document_id
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        chunks_count = res["data"]["doc"]["chunk_count"]

         with ThreadPoolExecutor(max_workers=5) as executor:
             futures = [
@@ -202,3 +222,5 @@ class TestAddChunk:
             ]
         responses = [f.result() for f in futures]
         assert all(r["code"] == 0 for r in responses)
+        res = list_chunks(get_http_api_auth, dataset_id, document_id)
+        assert res["data"]["doc"]["chunk_count"] == chunks_count + chunk_num
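Stitching the last two hunks together, the updated `test_concurrent_add_chunk` presumably reads roughly as below. The `executor.submit(...)` payload inside the futures list is not shown in the diff and is an assumption, and the method is written here as a plain function without `self`.

```python
# Hedged reconstruction of the updated test_concurrent_add_chunk. Everything except
# the assumed executor.submit(...) payload appears in the two hunks above.
from concurrent.futures import ThreadPoolExecutor

from common import add_chunk, list_chunks


def test_concurrent_add_chunk(get_http_api_auth, get_dataset_id_and_document_id):
    chunk_num = 50
    dataset_id, document_id = get_dataset_id_and_document_id
    res = list_chunks(get_http_api_auth, dataset_id, document_id)
    chunks_count = res["data"]["doc"]["chunk_count"]

    with ThreadPoolExecutor(max_workers=5) as executor:
        futures = [
            # assumed payload: any distinct content per chunk
            executor.submit(add_chunk, get_http_api_auth, dataset_id, document_id, {"content": f"chunk test {i}"})
            for i in range(chunk_num)
        ]
    responses = [f.result() for f in futures]
    assert all(r["code"] == 0 for r in responses)

    # The concurrency checkpoint: all 50 additions must be reflected in chunk_count.
    res = list_chunks(get_http_api_auth, dataset_id, document_id)
    assert res["data"]["doc"]["chunk_count"] == chunks_count + chunk_num
```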