Update displayed_name to display_name (#4311)

### What problem does this PR solve?

Renames the document-upload key `displayed_name` to `display_name` in the Python SDK (`DataSet.upload_documents()`), and updates the API reference example and SDK tests to match.
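
For reference, a minimal sketch of the new usage; the API key, server address, dataset name, and file path below are placeholders:

```python
from ragflow_sdk import RAGFlow

# Placeholders: substitute a real API key and server address.
rag_object = RAGFlow(api_key="<YOUR_API_KEY>", base_url="http://localhost:9380")
dataset = rag_object.create_dataset(name="example_dataset")

with open("test_data/ragflow.txt", "rb") as file:
    blob = file.read()

# The upload payload now uses "display_name" (previously "displayed_name").
docs = dataset.upload_documents([{"display_name": "ragflow.txt", "blob": blob}])
print(docs[0].id)
```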

### Type of change

- [x] Refactoring

Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
liuhua 2024-12-31 17:25:24 +08:00 committed by GitHub
parent e5b1511c66
commit 419b546f03
6 changed files with 53 additions and 53 deletions

@@ -894,7 +894,7 @@ dataset = rag_object.list_datasets(name="ragflow")
 dataset = dataset[0]
 name = 'ragflow_test.txt'
 path = './test_data/ragflow_test.txt'
-documents =[{"displayed_name":"test_retrieve_chunks.txt","blob":open(path, "rb").read()}]
+documents =[{"display_name":"test_retrieve_chunks.txt","blob":open(path, "rb").read()}]
 docs = dataset.upload_documents(documents)
 doc = docs[0]
 doc.add_chunk(content="This is a chunk addition test")

@@ -36,7 +36,7 @@ class DataSet(Base):
     def upload_documents(self,document_list: list[dict]):
         url = f"/datasets/{self.id}/documents"
-        files = [("file",(ele["displayed_name"],ele["blob"])) for ele in document_list]
+        files = [("file",(ele["display_name"],ele["blob"])) for ele in document_list]
         res = self.post(path=url,json=None,files=files)
         res = res.json()
         if res.get("code") == 0:
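
In `upload_documents`, each `display_name` value is passed as the filename of a multipart form part, so it is the name the server records for the uploaded blob. A rough sketch of the equivalent raw HTTP call using `requests` (the `/api/v1` base path, dataset ID, and API key below are assumptions/placeholders):

```python
import requests

# Assumed/placeholder values for illustration only.
base_url = "http://localhost:9380/api/v1"
dataset_id = "<DATASET_ID>"
api_key = "<YOUR_API_KEY>"

with open("test_data/ragflow.txt", "rb") as fh:
    blob = fh.read()

# Each ("file", (display_name, blob)) tuple becomes one multipart part;
# display_name is sent as that part's filename.
files = [("file", ("ragflow.txt", blob))]
res = requests.post(
    f"{base_url}/datasets/{dataset_id}/documents",
    headers={"Authorization": f"Bearer {api_key}"},
    files=files,
)
print(res.json())
```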

@@ -5,10 +5,10 @@ def test_create_chat_with_name(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_create_chat")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name":displayed_name,"blob":blob}
+    document = {"display_name":display_name,"blob":blob}
     documents = []
     documents.append(document)
     docs= kb.upload_documents(documents)
@@ -21,10 +21,10 @@ def test_update_chat_with_name(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_update_chat")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name": displayed_name, "blob": blob}
+    document = {"display_name": display_name, "blob": blob}
     documents = []
     documents.append(document)
     docs = kb.upload_documents(documents)
@@ -38,10 +38,10 @@ def test_delete_chats_with_success(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_delete_chat")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name": displayed_name, "blob": blob}
+    document = {"display_name": display_name, "blob": blob}
     documents = []
     documents.append(document)
     docs = kb.upload_documents(documents)
@@ -54,10 +54,10 @@ def test_list_chats_with_success(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_list_chats")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name": displayed_name, "blob": blob}
+    document = {"display_name": display_name, "blob": blob}
     documents = []
     documents.append(document)
     docs = kb.upload_documents(documents)

@@ -9,7 +9,7 @@ def test_parse_document_with_txt(get_api_key_fixture):
     name = 'ragflow_test.txt'
     with open("test_data/ragflow_test.txt", "rb") as file :
         blob = file.read()
-    docs = ds.upload_documents([{"displayed_name": name, "blob": blob}])
+    docs = ds.upload_documents([{"display_name": name, "blob": blob}])
     doc = docs[0]
     ds.async_parse_documents(document_ids=[doc.id])
     '''
@@ -28,7 +28,7 @@ def test_parse_and_cancel_document(get_api_key_fixture):
     name = 'ragflow_test.txt'
     with open("test_data/ragflow_test.txt", "rb") as file :
         blob = file.read()
-    docs=ds.upload_documents([{"displayed_name": name, "blob": blob}])
+    docs=ds.upload_documents([{"display_name": name, "blob": blob}])
     doc = docs[0]
     ds.async_parse_documents(document_ids=[doc.id])
     sleep(1)
@@ -43,9 +43,9 @@ def test_bulk_parse_documents(get_api_key_fixture):
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
     documents = [
-        {'displayed_name': 'test1.txt', 'blob': blob},
-        {'displayed_name': 'test2.txt', 'blob': blob},
-        {'displayed_name': 'test3.txt', 'blob': blob}
+        {'display_name': 'test1.txt', 'blob': blob},
+        {'display_name': 'test2.txt', 'blob': blob},
+        {'display_name': 'test3.txt', 'blob': blob}
     ]
     docs = ds.upload_documents(documents)
     ids = [doc.id for doc in docs]
@@ -70,10 +70,10 @@ def test_list_chunks_with_success(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_list_chunks_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_list_chunks_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     ids = [doc.id for doc in docs]
     ds.async_parse_documents(ids)
@@ -100,10 +100,10 @@ def test_add_chunk_with_success(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_list_chunks_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_list_chunks_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     doc.add_chunk(content="This is a chunk addition test")
@@ -119,10 +119,10 @@ def test_delete_chunk_with_success(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_delete_chunk_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_delete_chunk_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     chunk = doc.add_chunk(content="This is a chunk addition test")
@@ -140,10 +140,10 @@ def test_update_chunk_content(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_update_chunk_content_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_update_chunk_content_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     chunk = doc.add_chunk(content="This is a chunk addition test")
@@ -161,10 +161,10 @@ def test_update_chunk_available(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_update_chunk_available_with_success.txt","blob":blob}]
+    documents =[{"display_name":"test_update_chunk_available_with_success.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     chunk = doc.add_chunk(content="This is a chunk addition test")
@@ -183,10 +183,10 @@ def test_retrieve_chunks(get_api_key_fixture):
     # chunk_size = 1024 * 1024
     # chunks = [blob[i:i + chunk_size] for i in range(0, len(blob), chunk_size)]
     documents = [
-        {'displayed_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
+        {'display_name': f'chunk_{i}.txt', 'blob': chunk} for i, chunk in enumerate(chunks)
     ]
     '''
-    documents =[{"displayed_name":"test_retrieve_chunks.txt","blob":blob}]
+    documents =[{"display_name":"test_retrieve_chunks.txt","blob":blob}]
     docs = ds.upload_documents(documents)
     doc = docs[0]
     doc.add_chunk(content="This is a chunk addition test")

@@ -10,8 +10,8 @@ def test_upload_document_with_success(get_api_key_fixture):
     with open("test_data/ragflow.txt", "rb") as file:
         blob_2=file.read()
     document_infos = []
-    document_infos.append({"displayed_name": "test_1.txt","blob": blob})
-    document_infos.append({"displayed_name": "test_2.txt","blob": blob_2})
+    document_infos.append({"display_name": "test_1.txt","blob": blob})
+    document_infos.append({"display_name": "test_2.txt","blob": blob_2})
     ds.upload_documents(document_infos)
@@ -20,7 +20,7 @@ def test_update_document_with_success(get_api_key_fixture):
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     ds = rag.create_dataset(name="test_update_document")
     blob = b"Sample document content for test."
-    document_infos=[{"displayed_name":"test.txt","blob":blob}]
+    document_infos=[{"display_name":"test.txt","blob":blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     doc.update({"chunk_method": "manual", "name": "manual.txt"})
@@ -31,7 +31,7 @@ def test_download_document_with_success(get_api_key_fixture):
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     ds = rag.create_dataset(name="test_download_document")
     blob = b"Sample document content for test."
-    document_infos=[{"displayed_name": "test_1.txt","blob": blob}]
+    document_infos=[{"display_name": "test_1.txt","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     with open("test_download.txt","wb+") as file:
@@ -43,7 +43,7 @@ def test_list_documents_in_dataset_with_success(get_api_key_fixture):
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     ds = rag.create_dataset(name="test_list_documents")
     blob = b"Sample document content for test."
-    document_infos = [{"displayed_name": "test.txt","blob":blob}]
+    document_infos = [{"display_name": "test.txt","blob":blob}]
     ds.upload_documents(document_infos)
     ds.list_documents(keywords="test", page=1, page_size=12)
@@ -54,7 +54,7 @@ def test_delete_documents_in_dataset_with_success(get_api_key_fixture):
     ds = rag.create_dataset(name="test_delete_documents")
     name = "test_delete_documents.txt"
     blob = b"Sample document content for test."
-    document_infos=[{"displayed_name": name, "blob": blob}]
+    document_infos=[{"display_name": name, "blob": blob}]
     docs = ds.upload_documents(document_infos)
     ds.delete_documents([docs[0].id])
@@ -65,7 +65,7 @@ def test_upload_and_parse_pdf_documents_with_general_parse_method(get_api_key_fi
     ds = rag.create_dataset(name="test_pdf_document")
     with open("test_data/test.pdf", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.pdf","blob": blob}]
+    document_infos = [{"display_name": "test.pdf","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -76,7 +76,7 @@ def test_upload_and_parse_docx_documents_with_general_parse_method(get_api_key_f
     ds = rag.create_dataset(name="test_docx_document")
     with open("test_data/test.docx", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.docx","blob": blob}]
+    document_infos = [{"display_name": "test.docx","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -86,7 +86,7 @@ def test_upload_and_parse_excel_documents_with_general_parse_method(get_api_key_
     ds = rag.create_dataset(name="test_excel_document")
     with open("test_data/test.xlsx", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.xlsx","blob": blob}]
+    document_infos = [{"display_name": "test.xlsx","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -96,7 +96,7 @@ def test_upload_and_parse_ppt_documents_with_general_parse_method(get_api_key_fi
     ds = rag.create_dataset(name="test_ppt_document")
     with open("test_data/test.ppt", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.ppt","blob": blob}]
+    document_infos = [{"display_name": "test.ppt","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -106,7 +106,7 @@ def test_upload_and_parse_image_documents_with_general_parse_method(get_api_key_
     ds = rag.create_dataset(name="test_image_document")
     with open("test_data/test.jpg", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.jpg","blob": blob}]
+    document_infos = [{"display_name": "test.jpg","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -116,7 +116,7 @@ def test_upload_and_parse_txt_documents_with_general_parse_method(get_api_key_fi
     ds = rag.create_dataset(name="test_txt_document")
     with open("test_data/test.txt", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.txt","blob": blob}]
+    document_infos = [{"display_name": "test.txt","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -126,7 +126,7 @@ def test_upload_and_parse_md_documents_with_general_parse_method(get_api_key_fix
     ds = rag.create_dataset(name="test_md_document")
     with open("test_data/test.md", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.md","blob": blob}]
+    document_infos = [{"display_name": "test.md","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -137,7 +137,7 @@ def test_upload_and_parse_json_documents_with_general_parse_method(get_api_key_f
     ds = rag.create_dataset(name="test_json_document")
     with open("test_data/test.json", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.json","blob": blob}]
+    document_infos = [{"display_name": "test.json","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -149,7 +149,7 @@ def test_upload_and_parse_eml_documents_with_general_parse_method(get_api_key_fi
     ds = rag.create_dataset(name="test_eml_document")
    with open("test_data/test.eml", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.eml","blob": blob}]
+    document_infos = [{"display_name": "test.eml","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])
@@ -160,7 +160,7 @@ def test_upload_and_parse_html_documents_with_general_parse_method(get_api_key_f
     ds = rag.create_dataset(name="test_html_document")
     with open("test_data/test.html", "rb") as file:
         blob=file.read()
-    document_infos = [{"displayed_name": "test.html","blob": blob}]
+    document_infos = [{"display_name": "test.html","blob": blob}]
     docs=ds.upload_documents(document_infos)
     doc = docs[0]
     ds.async_parse_documents([doc.id])

@@ -7,10 +7,10 @@ def test_create_session_with_success(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_create_session")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name":displayed_name,"blob":blob}
+    document = {"display_name":display_name,"blob":blob}
     documents = []
     documents.append(document)
     docs= kb.upload_documents(documents)
@@ -24,10 +24,10 @@ def test_create_conversation_with_success(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_create_conversation")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name": displayed_name, "blob": blob}
+    document = {"display_name": display_name, "blob": blob}
     documents = []
     documents.append(document)
     docs = kb.upload_documents(documents)
@@ -46,10 +46,10 @@ def test_delete_sessions_with_success(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_delete_session")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name":displayed_name,"blob":blob}
+    document = {"display_name":display_name,"blob":blob}
     documents = []
     documents.append(document)
     docs= kb.upload_documents(documents)
@@ -64,10 +64,10 @@ def test_update_session_with_name(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_update_session")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name": displayed_name, "blob": blob}
+    document = {"display_name": display_name, "blob": blob}
     documents = []
     documents.append(document)
     docs = kb.upload_documents(documents)
@@ -82,10 +82,10 @@ def test_list_sessions_with_success(get_api_key_fixture):
     API_KEY = get_api_key_fixture
     rag = RAGFlow(API_KEY, HOST_ADDRESS)
     kb = rag.create_dataset(name="test_list_session")
-    displayed_name = "ragflow.txt"
+    display_name = "ragflow.txt"
     with open("test_data/ragflow.txt", "rb") as file:
         blob = file.read()
-    document = {"displayed_name":displayed_name,"blob":blob}
+    document = {"display_name":display_name,"blob":blob}
     documents = []
     documents.append(document)
     docs= kb.upload_documents(documents)