Complete DataSet SDK implementation (#2171)

### What problem does this PR solve?

Complete DataSet SDK implementation
#1102
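
For context, a rough usage sketch of the SDK surface this PR completes. Method and attribute names are taken from the diff below; the client constructor arguments, API key, and base URL are assumptions, so treat this as illustrative rather than exact.

```python
from ragflow import RAGFlow

# Client construction is assumed here; point it at your own deployment.
rag = RAGFlow(api_key="YOUR_API_KEY", base_url="http://127.0.0.1:9380")

# create_dataset posts to /dataset/save; with this PR the response carries the
# SDK-facing field names (embedding_model, chunk_count, document_count, parse_method).
ds = rag.create_dataset(name="kb_demo")
print(ds.id, ds.embedding_model, ds.parse_method)

# list_datasets wraps GET /dataset/list and returns DataSet objects.
for item in rag.list_datasets(page=1, page_size=50):
    print(item.name, item.chunk_count, item.document_count)
```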

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

---------

Co-authored-by: Feiue <10215101452@stu.ecun.edu.cn>
Commit 2c771fb0b4 (parent 667632ba00), authored by LiuHua on 2024-08-30 16:54:22 +08:00 and committed via GitHub.
3 changed files with 75 additions and 33 deletions


@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
from flask import request
from api.db import StatusEnum, FileSource
@@ -33,7 +34,7 @@ def save(tenant_id):
req = request.json
e, t = TenantService.get_by_id(tenant_id)
if "id" not in req:
if "tenant_id" in req or "embd_id" in req:
if "tenant_id" in req or "embedding_model" in req:
return get_data_error_result(
retmsg="Tenant_id or embedding_model must not be provided")
if "name" not in req:
@@ -47,22 +48,39 @@ def save(tenant_id):
if KnowledgebaseService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
return get_data_error_result(
retmsg="Duplicated knowledgebase name in creating dataset.")
req["tenant_id"] = tenant_id
req['created_by'] = tenant_id
req['embd_id'] = t.embd_id
req["tenant_id"] = req['created_by'] = tenant_id
req['embedding_model'] = t.embd_id
key_mapping = {
"chunk_num": "chunk_count",
"doc_num": "document_count",
"parser_id": "parse_method",
"embd_id": "embedding_model"
}
mapped_keys = {new_key: req[old_key] for new_key, old_key in key_mapping.items() if old_key in req}
req.update(mapped_keys)
if not KnowledgebaseService.save(**req):
return get_data_error_result(retmsg="Create dataset error.(Database error)")
return get_json_result(data=req)
renamed_data={}
e, k = KnowledgebaseService.get_by_id(req["id"])
for key, value in k.to_dict().items():
new_key = key_mapping.get(key, key)
renamed_data[new_key] = value
return get_json_result(data=renamed_data)
else:
invalid_keys = {"embd_id", "chunk_num", "doc_num", "parser_id"}
if any(key in req for key in invalid_keys):
return get_data_error_result(retmsg="The input parameters are invalid.")
if "tenant_id" in req:
if req["tenant_id"] != tenant_id:
return get_data_error_result(
retmsg="Can't change tenant_id.")
if "embd_id" in req:
if req["embd_id"] != t.embd_id:
if "embedding_model" in req:
if req["embedding_model"] != t.embd_id:
return get_data_error_result(
retmsg="Can't change embedding_model.")
req.pop("embedding_model")
if not KnowledgebaseService.query(
created_by=tenant_id, id=req["id"]):
@@ -72,20 +90,23 @@ def save(tenant_id):
e, kb = KnowledgebaseService.get_by_id(req["id"])
if "chunk_num" in req:
if req["chunk_num"] != kb.chunk_num:
if "chunk_count" in req:
if req["chunk_count"] != kb.chunk_num:
return get_data_error_result(
retmsg="Can't change chunk_count.")
req.pop("chunk_count")
if "doc_num" in req:
if req['doc_num'] != kb.doc_num:
if "document_count" in req:
if req['document_count'] != kb.doc_num:
return get_data_error_result(
retmsg="Can't change document_count.")
req.pop("document_count")
if "parser_id" in req:
if kb.chunk_num > 0 and req['parser_id'] != kb.parser_id:
if "parse_method" in req:
if kb.chunk_num != 0 and req['parse_method'] != kb.parser_id:
return get_data_error_result(
retmsg="if chunk count is not 0, parse method is not changable.")
retmsg="If chunk count is not 0, parse method is not changable.")
req['parser_id'] = req.pop('parse_method')
if "name" in req:
if req["name"].lower() != kb.name.lower() \
and len(KnowledgebaseService.query(name=req["name"], tenant_id=tenant_id,
@@ -103,6 +124,9 @@ def save(tenant_id):
@token_required
def delete(tenant_id):
req = request.args
if "id" not in req:
return get_data_error_result(
retmsg="id is required")
kbs = KnowledgebaseService.query(
created_by=tenant_id, id=req["id"])
if not kbs:
@@ -120,7 +144,7 @@ def delete(tenant_id):
if not KnowledgebaseService.delete_by_id(req["id"]):
return get_data_error_result(
retmsg="Delete dataset error.(Database error)")
retmsg="Delete dataset error.(Database serror)")
return get_json_result(data=True)
@@ -134,37 +158,63 @@ def list_datasets(tenant_id):
tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
kbs = KnowledgebaseService.get_by_tenant_ids(
[m["tenant_id"] for m in tenants], tenant_id, page_number, items_per_page, orderby, desc)
return get_json_result(data=kbs)
renamed_list = []
for kb in kbs:
key_mapping = {
"chunk_num": "chunk_count",
"doc_num": "document_count",
"parser_id": "parse_method",
"embd_id": "embedding_model"
}
renamed_data = {}
for key, value in kb.items():
new_key = key_mapping.get(key, key)
renamed_data[new_key] = value
renamed_list.append(renamed_data)
return get_json_result(data=renamed_list)
@manager.route('/detail', methods=['GET'])
@token_required
def detail(tenant_id):
req = request.args
key_mapping = {
"chunk_num": "chunk_count",
"doc_num": "document_count",
"parser_id": "parse_method",
"embd_id": "embedding_model"
}
renamed_data = {}
if "id" in req:
id = req["id"]
kb = KnowledgebaseService.query(created_by=tenant_id, id=req["id"])
if not kb:
return get_json_result(
data=False, retmsg='You do not own the dataset',
data=False, retmsg='You do not own the dataset.',
retcode=RetCode.OPERATING_ERROR)
if "name" in req:
name = req["name"]
if kb[0].name != name:
return get_json_result(
data=False, retmsg='You do not own the dataset',
data=False, retmsg='You do not own the dataset.',
retcode=RetCode.OPERATING_ERROR)
e, k = KnowledgebaseService.get_by_id(id)
return get_json_result(data=k.to_dict())
for key, value in k.to_dict().items():
new_key = key_mapping.get(key, key)
renamed_data[new_key] = value
return get_json_result(data=renamed_data)
else:
if "name" in req:
name = req["name"]
e, k = KnowledgebaseService.get_by_name(kb_name=name, tenant_id=tenant_id)
if not e:
return get_json_result(
data=False, retmsg='You do not own the dataset',
data=False, retmsg='You do not own the dataset.',
retcode=RetCode.OPERATING_ERROR)
return get_json_result(data=k.to_dict())
for key, value in k.to_dict().items():
new_key = key_mapping.get(key, key)
renamed_data[new_key] = value
return get_json_result(data=renamed_data)
else:
return get_data_error_result(
retmsg="At least one of `id` or `name` must be provided.")


@@ -24,14 +24,6 @@ class DataSet(Base):
self.parse_method = "naive"
self.parser_config = None
for k in list(res_dict.keys()):
if k == "embd_id":
res_dict["embedding_model"] = res_dict[k]
if k == "parser_id":
res_dict['parse_method'] = res_dict[k]
if k == "doc_num":
res_dict["document_count"] = res_dict[k]
if k == "chunk_num":
res_dict["chunk_count"] = res_dict[k]
if k not in self.__dict__:
res_dict.pop(k)
super().__init__(rag, res_dict)
@@ -39,9 +31,9 @@ class DataSet(Base):
def save(self) -> bool:
res = self.post('/dataset/save',
{"id": self.id, "name": self.name, "avatar": self.avatar, "tenant_id": self.tenant_id,
"description": self.description, "language": self.language, "embd_id": self.embedding_model,
"description": self.description, "language": self.language, "embedding_model": self.embedding_model,
"permission": self.permission,
"doc_num": self.document_count, "chunk_num": self.chunk_count, "parser_id": self.parse_method,
"document_count": self.document_count, "chunk_count": self.chunk_count, "parse_method": self.parse_method,
"parser_config": self.parser_config.to_json()
})
res = res.json()
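
With the renaming now handled server-side, DataSet.save above simply round-trips the SDK-facing names. A sketch of an update call under the server rules shown earlier (the `rag` client comes from the sketch in the PR description; the chosen parse method value is an assumption):

```python
ds = rag.create_dataset(name="kb_update_demo")

# parse_method may only change while chunk_count is still 0, per the
# server-side check in the save endpoint; tenant_id and embedding_model
# are rejected if they differ from the dataset's current values.
ds.parse_method = "manual"          # assumed to be a valid parser id
ds.description = "updated via SDK"

if not ds.save():
    raise RuntimeError("dataset update rejected by /dataset/save")
```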


@@ -52,7 +52,7 @@ class RAGFlow:
res = self.post("/dataset/save",
{"name": name, "avatar": avatar, "description": description, "language": language,
"permission": permission,
"doc_num": document_count, "chunk_num": chunk_count, "parser_id": parse_method,
"document_count": document_count, "chunk_count": chunk_count, "parse_method": parse_method,
"parser_config": parser_config
}
)
@@ -61,7 +61,7 @@ class RAGFlow:
return DataSet(self, res["data"])
raise Exception(res["retmsg"])
def list_datasets(self, page: int = 1, page_size: int = 150, orderby: str = "create_time", desc: bool = True) -> \
def list_datasets(self, page: int = 1, page_size: int = 1024, orderby: str = "create_time", desc: bool = True) -> \
List[DataSet]:
res = self.get("/dataset/list", {"page": page, "page_size": page_size, "orderby": orderby, "desc": desc})
res = res.json()
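
Failures surface through the `raise Exception(res["retmsg"])` path visible above, so, for example, the duplicate-name check in the server's save endpoint shows up client-side as an exception (continuing the earlier sketch):

```python
try:
    rag.create_dataset(name="kb_demo")  # same name as the dataset created earlier
except Exception as err:
    # e.g. "Duplicated knowledgebase name in creating dataset."
    print("create failed:", err)
```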