Fix error response (#3719)

### What problem does this PR solve?



### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: Jin Hai <haijin.chn@gmail.com>
This commit is contained in:
Kevin Hu 2024-11-28 18:56:10 +08:00 committed by GitHub
parent 4e8e4fe53f
commit 91f1814a87
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 7 additions and 1 deletions

View File

@@ -96,7 +96,7 @@ def get():
kb_ids = KnowledgebaseService.get_kb_ids(tenant_id)
chunk = settings.docStoreConn.get(chunk_id, search.index_name(tenant_id), kb_ids)
if chunk is None:
-            return server_error_response("Chunk not found")
+            return server_error_response(Exception("Chunk not found"))
k = []
for n in chunk.keys():
if re.search(r"(_vec$|_sm_|_tks|_ltks)", n):

View File

@@ -158,6 +158,8 @@ class XInferenceRerank(Base):
def __init__(self, key="xxxxxxx", model_name="", base_url=""):
if base_url.find("/v1") == -1:
base_url = urljoin(base_url, "/v1/rerank")
if base_url.find("/rerank") == -1:
base_url = urljoin(base_url, "/v1/rerank")
self.model_name = model_name
self.base_url = base_url
self.headers = {

View File

@@ -4,6 +4,7 @@ import pytest
import random
import string
def test_dataset(get_auth):
# create dataset
res = create_dataset(get_auth, "test_create_dataset")
@@ -58,6 +59,7 @@ def test_dataset_1k_dataset(get_auth):
assert res.get("code") == 0, f"{res.get('message')}"
print(f"{len(dataset_list)} datasets are deleted")
def test_duplicated_name_dataset(get_auth):
# create dataset
for i in range(20):
@@ -81,6 +83,7 @@ def test_duplicated_name_dataset(get_auth):
assert res.get("code") == 0, f"{res.get('message')}"
print(f"{len(dataset_list)} datasets are deleted")
def test_invalid_name_dataset(get_auth):
# create dataset
# with pytest.raises(Exception) as e:
@@ -99,6 +102,7 @@ def test_invalid_name_dataset(get_auth):
assert res['code'] == 102
print(res)
def test_update_different_params_dataset(get_auth):
# create dataset
res = create_dataset(get_auth, "test_create_dataset")