# Add test for CI (#3114)
### What problem does this PR solve?

Add test for CI

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
commit 9aeb07d830 (parent 5590a823c6)
.github/workflows/tests.yml (10 changed lines, vendored)
@@ -70,6 +70,16 @@ jobs:
         echo "RAGFLOW_IMAGE=infiniflow/ragflow:dev" >> docker/.env
         sudo docker compose -f docker/docker-compose.yml up -d

+    - name: Run tests
+      run: |
+        export http_proxy=""; export https_proxy=""; export no_proxy=""; export HTTP_PROXY=""; export HTTPS_PROXY=""; export NO_PROXY=""
+        export HOST_ADDRESS=http://host.docker.internal:9380
+        until sudo docker exec ragflow-server curl -s --connect-timeout 5 ${HOST_ADDRESS} > /dev/null; do
+          echo "Waiting for service to be available..."
+          sleep 5
+        done
+        cd sdk/python && poetry install && source .venv/bin/activate && cd test && pytest t_dataset.py t_chat.py t_session.py
+
     - name: Stop ragflow:dev
       if: always() # always run this step even if previous steps failed
       run: |
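The new step polls the server from inside the ragflow-server container before running pytest, so the SDK tests never start against a half-booted stack. For local debugging outside the workflow, the same readiness check can be sketched in Python — an illustrative helper under assumed names, not part of this commit:

```python
import os
import time

import requests  # already a dependency of the SDK test suite

HOST_ADDRESS = os.getenv("HOST_ADDRESS", "http://127.0.0.1:9380")

def wait_for_server(url: str, timeout: int = 300, interval: int = 5) -> None:
    """Poll until the server answers, mirroring the workflow's until-curl loop."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            requests.get(url, timeout=5)  # any HTTP response means the port is up
            return
        except requests.exceptions.RequestException:
            print("Waiting for service to be available...")
            time.sleep(interval)
    raise TimeoutError(f"{url} did not come up within {timeout}s")

if __name__ == "__main__":
    wait_for_server(HOST_ADDRESS)
```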
@@ -41,8 +41,8 @@ def create(tenant_id):
         if kb.chunk_num == 0:
             return get_error_data_result(f"The dataset {kb_id} doesn't own parsed file")
     kbs = KnowledgebaseService.get_by_ids(ids)
-    embd_count = list(set(kb.embd_id for kb in kbs))
-    if embd_count != 1:
+    embd_count = list(set([kb.embd_id for kb in kbs]))
+    if len(embd_count) != 1:
         return get_result(retmsg='Datasets use different embedding models."',retcode=RetCode.AUTHENTICATION_ERROR)
     req["kb_ids"] = ids
     # llm
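The substance of this fix: `list(set(...))` always yields a list, and a list never compares equal to the integer 1, so the old guard could not detect whether the datasets shared a single embedding model. A standalone illustration, not taken from the codebase:

```python
# Two datasets sharing one embedding model.
embd_ids = ["bge-m3", "bge-m3"]

embd_count = list(set(embd_ids))  # ["bge-m3"]
print(embd_count != 1)            # True  -- a list is never equal to the int 1
print(len(embd_count) != 1)       # False -- exactly one distinct model, as intended
```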
@@ -167,8 +167,8 @@ def update(tenant_id,chat_id):
         if kb.chunk_num == 0:
             return get_error_data_result(f"The dataset {kb_id} doesn't own parsed file")
     kbs = KnowledgebaseService.get_by_ids(ids)
-    embd_count=list(set(kb.embd_id for kb in kbs))
-    if embd_count != 1 :
+    embd_count=list(set([kb.embd_id for kb in kbs]))
+    if len(embd_count) != 1 :
         return get_result(
             retmsg='Datasets use different embedding models."',
             retcode=RetCode.AUTHENTICATION_ERROR)
@@ -120,7 +120,7 @@ def delete(tenant_id):
         if not KnowledgebaseService.delete_by_id(id):
             return get_error_data_result(
                 retmsg="Delete dataset error.(Database error)")
-        return get_result(retcode=RetCode.SUCCESS)
+    return get_result(retcode=RetCode.SUCCESS)

@manager.route('/datasets/<dataset_id>', methods=['PUT'])
@token_required
@@ -509,9 +509,9 @@ def rm_chunk(tenant_id,dataset_id,document_id):
         if chunk_id not in sres.ids:
             return get_error_data_result(f"Chunk {chunk_id} not found")
     if not ELASTICSEARCH.deleteByQuery(
-            Q("ids", values=req["chunk_ids"]), search.index_name(tenant_id)):
+            Q("ids", values=chunk_list), search.index_name(tenant_id)):
         return get_error_data_result(retmsg="Index updating failure")
-    deleted_chunk_ids = req["chunk_ids"]
+    deleted_chunk_ids = chunk_list
     chunk_number = len(deleted_chunk_ids)
     DocumentService.decrement_chunk_num(doc.id, doc.kb_id, 1, chunk_number, 0)
     return get_result()
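Both the delete query and the chunk count now use `chunk_list` — the collection that was just validated against `sres.ids` above — rather than the raw `req["chunk_ids"]` payload, presumably so that only chunk IDs confirmed to exist are removed from the index and subtracted from the document's chunk count.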
@@ -337,7 +337,7 @@ def valid(permission,valid_permission,language,valid_language,chunk_method,valid

 def valid_parameter(parameter,valid_values):
     if parameter and parameter not in valid_values:
-        return get_error_data_result(f"`{parameter}` is not in {valid_values}")
+        return get_error_data_result(f"'{parameter}' is not in {valid_values}")

 def get_parser_config(chunk_method,parser_config):
     if parser_config:
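The quoting change (backticks to single quotes) matters because the SDK test further down asserts the exact message: `str(exc_info.value) == f"'{chunk_method}' is not in {valid_chunk_methods}"`. Without this change the new CI test would fail on a cosmetic mismatch.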
@@ -354,6 +354,8 @@ def get_parser_config(chunk_method,parser_config):
         "laws":{"raptor":{"use_raptor":False}},
         "presentation":{"raptor":{"use_raptor":False}},
         "one":None,
-        "knowledge_graph":{"chunk_token_num":8192,"delimiter":"\\n!?;。;!?","entity_types":["organization","person","location","event","time"]}}
+        "knowledge_graph":{"chunk_token_num":8192,"delimiter":"\\n!?;。;!?","entity_types":["organization","person","location","event","time"]},
+        "email":None,
+        "picture":None}
     parser_config=key_mapping[chunk_method]
     return parser_config
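Without the two new entries, any request using the `email` or `picture` chunk method would crash at the mapping lookup. A standalone illustration with a hypothetical minimal mapping, not the full one above:

```python
key_mapping = {"one": None, "knowledge_graph": {"chunk_token_num": 8192}}

# Before the fix: chunk methods missing from the mapping raise KeyError.
try:
    key_mapping["email"]
except KeyError:
    print("KeyError: 'email'")

# After the fix the keys exist and resolve to a default (None) parser config.
key_mapping.update({"email": None, "picture": None})
print(key_mapping["email"])  # None
```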
@@ -1,3 +1,3 @@
-import ragflow
+import ragflow_sdk

-print(ragflow.__version__)
+print(ragflow_sdk.__version__)
sdk/python/poetry.lock (97 changed lines, generated)
@@ -125,6 +125,31 @@ files = [
     {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
 ]

+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.2"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
 [[package]]
 name = "idna"
 version = "3.10"
@@ -139,6 +164,65 @@ files = [

 [package.extras]
 all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]

+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.1"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+    {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
+]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+    {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pytest"
+version = "8.3.3"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
+    {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=1.5,<2"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
 [[package]]
 name = "requests"
 version = "2.32.3"
@@ -160,6 +244,17 @@ urllib3 = ">=1.21.1,<3"
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
 use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

+[[package]]
+name = "tomli"
+version = "2.0.2"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
+    {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
+]
+
 [[package]]
 name = "urllib3"
 version = "2.2.3"
@@ -180,4 +275,4 @@ zstd = ["zstandard (>=0.18.0)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "3423d9502ced305145882182d8e309fa020c3ab007d9877b755dcb9d16e9da2c"
+content-hash = "202bfd3e121f1d57a2f9c9d91cd7a50eacf2362cd1995c9f6347bcb100cf9336"
@@ -10,6 +10,7 @@ package-mode = true
 [tool.poetry.dependencies]
 python = "^3.10"
 requests = "^2.30.0"
+pytest = "^8.0.0"


 [build-system]
@@ -1,11 +1,11 @@
-import pytest
-import requests
 import string
 import random
+import os
+import pytest
+import requests


-HOST_ADDRESS = 'http://127.0.0.1:9380'
+HOST_ADDRESS = os.getenv('HOST_ADDRESS', 'http://127.0.0.1:9380')

 def generate_random_email():
     return 'user_' + ''.join(random.choices(string.ascii_lowercase + string.digits, k=8))+'@1.com'
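Reading `HOST_ADDRESS` from the environment lets the same tests target the dockerized server in CI and a local instance otherwise. A quick standalone check of the fallback behaviour:

```python
import os

# Unset: the default keeps local runs pointing at a locally started server.
print(os.getenv('HOST_ADDRESS', 'http://127.0.0.1:9380'))  # http://127.0.0.1:9380

# In CI the workflow exports HOST_ADDRESS=http://host.docker.internal:9380,
# so the same lookup returns the dockerized server's address instead.
os.environ['HOST_ADDRESS'] = 'http://host.docker.internal:9380'
print(os.getenv('HOST_ADDRESS', 'http://127.0.0.1:9380'))  # http://host.docker.internal:9380
```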
@@ -1,12 +1,14 @@
+import os
 from ragflow_sdk import RAGFlow
-HOST_ADDRESS = 'http://127.0.0.1:9380'
+
+HOST_ADDRESS = os.getenv('HOST_ADDRESS', 'http://127.0.0.1:9380')

 def test_create_chat_with_name(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_create_chat")
     displayed_name = "ragflow.txt"
-    with open("./ragflow.txt","rb") as file:
+    with open("ragflow.txt", "rb") as file:
         blob = file.read()
     document = {"displayed_name":displayed_name,"blob":blob}
     documents = []
@@ -22,7 +24,7 @@ def test_update_chat_with_name(get_api_key_fixture):
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_update_chat")
     displayed_name = "ragflow.txt"
-    with open("./ragflow.txt", "rb") as file:
+    with open("ragflow.txt", "rb") as file:
         blob = file.read()
     document = {"displayed_name": displayed_name, "blob": blob}
     documents = []
@@ -39,7 +41,7 @@ def test_delete_chats_with_success(get_api_key_fixture):
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_delete_chat")
     displayed_name = "ragflow.txt"
-    with open("./ragflow.txt", "rb") as file:
+    with open("ragflow.txt", "rb") as file:
         blob = file.read()
     document = {"displayed_name": displayed_name, "blob": blob}
     documents = []
@@ -53,9 +55,9 @@ def test_delete_chats_with_success(get_api_key_fixture):
 def test_list_chats_with_success(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    kb = rag.create_dataset(name="test_delete_chat")
+    kb = rag.create_dataset(name="test_list_chats")
     displayed_name = "ragflow.txt"
-    with open("./ragflow.txt", "rb") as file:
+    with open("ragflow.txt", "rb") as file:
         blob = file.read()
     document = {"displayed_name": displayed_name, "blob": blob}
     documents = []
@@ -1,54 +1,55 @@
-from ragflow_sdk import RAGFlow
-import random
-import pytest
-
-HOST_ADDRESS = 'http://127.0.0.1:9380'
-
-def test_create_dataset_with_name(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    rag.create_dataset("test_create_dataset_with_name")
-
-def test_create_dataset_with_duplicated_name(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    with pytest.raises(Exception) as exc_info:
-        rag.create_dataset("test_create_dataset_with_name")
-    assert str(exc_info.value) == "Duplicated dataset name in creating dataset."
-
-def test_create_dataset_with_random_chunk_method(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    valid_chunk_methods = ["naive","manual","qa","table","paper","book","laws","presentation","picture","one","knowledge_graph","email"]
-    random_chunk_method = random.choice(valid_chunk_methods)
-    rag.create_dataset("test_create_dataset_with_random_chunk_method",chunk_method=random_chunk_method)
-
-def test_create_dataset_with_invalid_parameter(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    valid_chunk_methods = ["naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one",
-                           "knowledge_graph", "email"]
-    chunk_method = "invalid_chunk_method"
-    with pytest.raises(Exception) as exc_info:
-        rag.create_dataset("test_create_dataset_with_name",chunk_method=chunk_method)
-    assert str(exc_info.value) == f"{chunk_method} is not in {valid_chunk_methods}"
-
-
-def test_update_dataset_with_name(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    ds = rag.create_dataset("test_update_dataset")
-    ds.update({"name": "updated_dataset"})
-
-
-def test_delete_datasets_with_success(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    ds = rag.create_dataset("MA")
-    rag.delete_datasets(ids=[ds.id])
-
-
-def test_list_datasets_with_success(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    rag.list_datasets()
+import os
+import random
+import pytest
+from ragflow_sdk import RAGFlow
+
+HOST_ADDRESS = os.getenv('HOST_ADDRESS', 'http://127.0.0.1:9380')
+
+def test_create_dataset_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    rag.create_dataset("test_create_dataset_with_name")
+
+def test_create_dataset_with_duplicated_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    with pytest.raises(Exception) as exc_info:
+        rag.create_dataset("test_create_dataset_with_name")
+    assert str(exc_info.value) == "Duplicated dataset name in creating dataset."
+
+def test_create_dataset_with_random_chunk_method(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    valid_chunk_methods = ["naive","manual","qa","table","paper","book","laws","presentation","picture","one","knowledge_graph","email"]
+    random_chunk_method = random.choice(valid_chunk_methods)
+    rag.create_dataset("test_create_dataset_with_random_chunk_method",chunk_method=random_chunk_method)
+
+def test_create_dataset_with_invalid_parameter(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    valid_chunk_methods = ["naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one",
+                           "knowledge_graph", "email"]
+    chunk_method = "invalid_chunk_method"
+    with pytest.raises(Exception) as exc_info:
+        rag.create_dataset("test_create_dataset_with_name",chunk_method=chunk_method)
+    assert str(exc_info.value) == f"'{chunk_method}' is not in {valid_chunk_methods}"
+
+
+def test_update_dataset_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    ds = rag.create_dataset("test_update_dataset")
+    ds.update({"name": "updated_dataset"})
+
+
+def test_delete_datasets_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    ds = rag.create_dataset("MA")
+    rag.delete_datasets(ids=[ds.id])
+
+
+def test_list_datasets_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    rag.list_datasets()
@@ -1,6 +1,7 @@
+import os
 from ragflow_sdk import RAGFlow, DataSet, Document, Chunk

-HOST_ADDRESS = 'http://127.0.0.1:9380'
+HOST_ADDRESS = os.getenv('HOST_ADDRESS', 'http://127.0.0.1:9380')


 def test_upload_document_with_success(get_api_key_fixture):
@@ -66,7 +67,7 @@ def test_download_document_with_success(get_api_key_fixture):
     # Check if the retrieved document is of type Document
     if isinstance(doc, Document):
         # Download the document content and save it to a file
-        with open("./ragflow.txt", "wb+") as file:
+        with open("ragflow.txt", "wb+") as file:
             file.write(doc.download())
         # Print the document object for debugging
         print(doc)
@@ -144,7 +145,7 @@ def test_parse_and_cancel_document(get_api_key_fixture):

     # Define the document name and path
     name3 = 'westworld.pdf'
-    path = './test_data/westworld.pdf'
+    path = 'test_data/westworld.pdf'

     # Create a document in the dataset using the file path
     ds.upload_documents({"name": name3, "blob": open(path, "rb").read()})
@@ -1,94 +1,96 @@
-from ragflow_sdk import RAGFlow
-HOST_ADDRESS = 'http://127.0.0.1:9380'
-
-
-def test_create_session_with_success(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    kb = rag.create_dataset(name="test_create_session")
-    displayed_name = "ragflow.txt"
-    with open("./ragflow.txt", "rb") as file:
-        blob = file.read()
-    document = {"displayed_name":displayed_name,"blob":blob}
-    documents = []
-    documents.append(document)
-    docs= kb.upload_documents(documents)
-    for doc in docs:
-        doc.add_chunk("This is a test to add chunk")
-    assistant=rag.create_chat("test_create", dataset_ids=[kb.id])
-    assistant.create_session()
-
-
-def test_create_conversation_with_success(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    kb = rag.create_dataset(name="test_create_conversation")
-    displayed_name = "ragflow.txt"
-    with open("./ragflow.txt","rb") as file:
-        blob = file.read()
-    document = {"displayed_name": displayed_name, "blob": blob}
-    documents = []
-    documents.append(document)
-    docs = kb.upload_documents(documents)
-    for doc in docs:
-        doc.add_chunk("This is a test to add chunk")
-    assistant = rag.create_chat("test_create", dataset_ids=[kb.id])
-    session = assistant.create_session()
-    question = "What is AI"
-    for ans in session.ask(question, stream=True):
-        pass
-    assert not ans.content.startswith("**ERROR**"), "Please check this error."
-
-
-def test_delete_sessions_with_success(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    kb = rag.create_dataset(name="test_delete_session")
-    displayed_name = "ragflow.txt"
-    with open("./ragflow.txt","rb") as file:
-        blob = file.read()
-    document = {"displayed_name":displayed_name,"blob":blob}
-    documents = []
-    documents.append(document)
-    docs= kb.upload_documents(documents)
-    for doc in docs:
-        doc.add_chunk("This is a test to add chunk")
-    assistant=rag.create_chat("test_create", dataset_ids=[kb.id])
-    session = assistant.create_session()
-    assistant.delete_sessions(ids=[session.id])
-
-def test_update_session_with_name(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    kb = rag.create_dataset(name="test_update_session")
-    displayed_name = "ragflow.txt"
-    with open("./ragflow.txt","rb") as file:
-        blob = file.read()
-    document = {"displayed_name": displayed_name, "blob": blob}
-    documents = []
-    documents.append(document)
-    docs = kb.upload_documents(documents)
-    for doc in docs:
-        doc.add_chunk("This is a test to add chunk")
-    assistant = rag.create_chat("test_create", dataset_ids=[kb.id])
-    session = assistant.create_session(name="old session")
-    session.update({"name": "new session"})
-
-
-def test_list_sessions_with_success(get_api_key_fixture):
-    API_KEY = get_api_key_fixture
-    rag = RAGFlow(API_KEY, HOST_ADDRESS)
-    kb = rag.create_dataset(name="test_list_session")
-    displayed_name = "ragflow.txt"
-    with open("./ragflow.txt","rb") as file:
-        blob = file.read()
-    document = {"displayed_name":displayed_name,"blob":blob}
-    documents = []
-    documents.append(document)
-    docs= kb.upload_documents(documents)
-    for doc in docs:
-        doc.add_chunk("This is a test to add chunk")
-    assistant=rag.create_chat("test_create", dataset_ids=[kb.id])
-    assistant.create_session("test_1")
-    assistant.create_session("test_2")
+import os
+from ragflow_sdk import RAGFlow
+
+HOST_ADDRESS = os.getenv('HOST_ADDRESS', 'http://127.0.0.1:9380')
+
+
+def test_create_session_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_create_session")
+    displayed_name = "ragflow.txt"
+    with open("ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc.add_chunk("This is a test to add chunk")
+    assistant=rag.create_chat("test_create_session", dataset_ids=[kb.id])
+    assistant.create_session()
+
+
+def test_create_conversation_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_create_conversation")
+    displayed_name = "ragflow.txt"
+    with open("ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name": displayed_name, "blob": blob}
+    documents = []
+    documents.append(document)
+    docs = kb.upload_documents(documents)
+    for doc in docs:
+        doc.add_chunk("This is a test to add chunk")
+    assistant = rag.create_chat("test_create_conversation", dataset_ids=[kb.id])
+    session = assistant.create_session()
+    question = "What is AI"
+    for ans in session.ask(question, stream=True):
+        pass
+    assert not ans.content.startswith("**ERROR**"), "Please check this error."
+
+
+def test_delete_sessions_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_delete_session")
+    displayed_name = "ragflow.txt"
+    with open("ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc.add_chunk("This is a test to add chunk")
+    assistant=rag.create_chat("test_delete_session", dataset_ids=[kb.id])
+    session = assistant.create_session()
+    assistant.delete_sessions(ids=[session.id])
+
+def test_update_session_with_name(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_update_session")
+    displayed_name = "ragflow.txt"
+    with open("ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name": displayed_name, "blob": blob}
+    documents = []
+    documents.append(document)
+    docs = kb.upload_documents(documents)
+    for doc in docs:
+        doc.add_chunk("This is a test to add chunk")
+    assistant = rag.create_chat("test_update_session", dataset_ids=[kb.id])
+    session = assistant.create_session(name="old session")
+    session.update({"name": "new session"})
+
+
+def test_list_sessions_with_success(get_api_key_fixture):
+    API_KEY = get_api_key_fixture
+    rag = RAGFlow(API_KEY, HOST_ADDRESS)
+    kb = rag.create_dataset(name="test_list_session")
+    displayed_name = "ragflow.txt"
+    with open("ragflow.txt", "rb") as file:
+        blob = file.read()
+    document = {"displayed_name":displayed_name,"blob":blob}
+    documents = []
+    documents.append(document)
+    docs= kb.upload_documents(documents)
+    for doc in docs:
+        doc.add_chunk("This is a test to add chunk")
+    assistant=rag.create_chat("test_list_session", dataset_ids=[kb.id])
+    assistant.create_session("test_1")
+    assistant.create_session("test_2")
+    assistant.list_sessions()
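Beyond reading HOST_ADDRESS from the environment and dropping the "./" path prefixes, the session-test rewrite gives each chat assistant a name matching its test (test_create_session, test_delete_session, and so on) instead of the "test_create" name the old tests all shared, and, as rendered in this diff, the list test now ends by actually calling assistant.list_sessions().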