From b68d349bd6d2abb0f899e6ff71351f06687134ee Mon Sep 17 00:00:00 2001 From: liuhua <10215101452@stu.ecnu.edu.cn> Date: Thu, 26 Sep 2024 16:05:25 +0800 Subject: [PATCH] Fix: rerank_model and pdf_parser bugs | Update: session API (#2601) ### What problem does this PR solve? Fix: rerank_model and pdf_parser bugs | Update: session API #2575 #2559 ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) - [x] Refactoring --------- Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn> --- api/apps/sdk/session.py | 13 +++-- deepdoc/parser/pdf_parser.py | 2 +- rag/llm/rerank_model.py | 76 ++++++++++++++++--------- sdk/python/ragflow/modules/assistant.py | 2 +- sdk/python/ragflow/modules/session.py | 14 +++-- sdk/python/test/t_session.py | 2 +- 6 files changed, 68 insertions(+), 41 deletions(-) diff --git a/api/apps/sdk/session.py b/api/apps/sdk/session.py index c6430f0c6..9fcab50b1 100644 --- a/api/apps/sdk/session.py +++ b/api/apps/sdk/session.py @@ -87,9 +87,9 @@ def completion(tenant_id): # req = {"conversation_id": "9aaaca4c11d311efa461fa163e197198", "messages": [ # {"role": "user", "content": "上海有吗?"} # ]} - if "id" not in req: - return get_data_error_result(retmsg="id is required") - conv = ConversationService.query(id=req["id"]) + if "session_id" not in req: - return get_data_error_result(retmsg="session_id is required") + conv = ConversationService.query(id=req["session_id"]) if not conv: return get_data_error_result(retmsg="Session does not exist") conv = conv[0] @@ -108,7 +108,7 @@ def completion(tenant_id): msg.append(m) message_id = msg[-1].get("id") e, dia = DialogService.get_by_id(conv.dialog_id) - del req["id"] + del req["session_id"] if not conv.reference: conv.reference = [] @@ -168,6 +168,9 @@ def get(tenant_id): return get_data_error_result(retmsg="Session does not exist") if not DialogService.query(id=conv[0].dialog_id, tenant_id=tenant_id, status=StatusEnum.VALID.value): return get_data_error_result(retmsg="You do not own the 
session") + if "assistant_id" in req: + if req["assistant_id"] != conv[0].dialog_id: + return get_data_error_result(retmsg="The session doesn't belong to the assistant") conv = conv[0].to_dict() conv['messages'] = conv.pop("message") conv["assistant_id"] = conv.pop("dialog_id") @@ -207,7 +210,7 @@ def list(tenant_id): assistant_id = request.args["assistant_id"] if not DialogService.query(tenant_id=tenant_id, id=assistant_id, status=StatusEnum.VALID.value): return get_json_result( - data=False, retmsg=f'Only owner of the assistant is authorized for this operation.', + data=False, retmsg=f"You don't own the assistant.", retcode=RetCode.OPERATING_ERROR) convs = ConversationService.query( dialog_id=assistant_id, diff --git a/deepdoc/parser/pdf_parser.py b/deepdoc/parser/pdf_parser.py index 2eab41298..e5268a8a4 100644 --- a/deepdoc/parser/pdf_parser.py +++ b/deepdoc/parser/pdf_parser.py @@ -488,7 +488,7 @@ class RAGFlowPdfParser: i += 1 continue - if not down["text"].strip(): + if not down["text"].strip() or not up["text"].strip(): i += 1 continue diff --git a/rag/llm/rerank_model.py b/rag/llm/rerank_model.py index 245e85a63..1d80fd0ec 100644 --- a/rag/llm/rerank_model.py +++ b/rag/llm/rerank_model.py @@ -26,9 +26,11 @@ from api.utils.file_utils import get_home_cache_dir from rag.utils import num_tokens_from_string, truncate import json + def sigmoid(x): return 1 / (1 + np.exp(-x)) + class Base(ABC): def __init__(self, key, model_name): pass @@ -59,16 +61,19 @@ class DefaultRerank(Base): with DefaultRerank._model_lock: if not DefaultRerank._model: try: - DefaultRerank._model = FlagReranker(os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z]+/", "", model_name)), use_fp16=torch.cuda.is_available()) + DefaultRerank._model = FlagReranker( + os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z]+/", "", model_name)), + use_fp16=torch.cuda.is_available()) except Exception as e: - model_dir = snapshot_download(repo_id= model_name, - 
local_dir=os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z]+/", "", model_name)), + model_dir = snapshot_download(repo_id=model_name, + local_dir=os.path.join(get_home_cache_dir(), + re.sub(r"^[a-zA-Z]+/", "", model_name)), local_dir_use_symlinks=False) DefaultRerank._model = FlagReranker(model_dir, use_fp16=torch.cuda.is_available()) self._model = DefaultRerank._model def similarity(self, query: str, texts: list): - pairs = [(query,truncate(t, 2048)) for t in texts] + pairs = [(query, truncate(t, 2048)) for t in texts] token_count = 0 for _, t in pairs: token_count += num_tokens_from_string(t) @@ -77,8 +82,10 @@ class DefaultRerank(Base): for i in range(0, len(pairs), batch_size): scores = self._model.compute_score(pairs[i:i + batch_size], max_length=2048) scores = sigmoid(np.array(scores)).tolist() - if isinstance(scores, float): res.append(scores) - else: res.extend(scores) + if isinstance(scores, float): + res.append(scores) + else: + res.extend(scores) return np.array(res), token_count @@ -101,7 +108,10 @@ class JinaRerank(Base): "top_n": len(texts) } res = requests.post(self.base_url, headers=self.headers, json=data).json() - return np.array([d["relevance_score"] for d in res["results"]]), res["usage"]["total_tokens"] + rank = np.zeros(len(texts), dtype=float) + for d in res["results"]: + rank[d["index"]] = d["relevance_score"] + return rank, res["usage"]["total_tokens"] class YoudaoRerank(DefaultRerank): @@ -124,7 +134,7 @@ class YoudaoRerank(DefaultRerank): "maidalun1020", "InfiniFlow")) self._model = YoudaoRerank._model - + def similarity(self, query: str, texts: list): pairs = [(query, truncate(t, self._model.max_length)) for t in texts] token_count = 0 @@ -135,8 +145,10 @@ class YoudaoRerank(DefaultRerank): for i in range(0, len(pairs), batch_size): scores = self._model.compute_score(pairs[i:i + batch_size], max_length=self._model.max_length) scores = sigmoid(np.array(scores)).tolist() - if isinstance(scores, float): res.append(scores) - else: 
res.extend(scores) + if isinstance(scores, float): + res.append(scores) + else: + res.extend(scores) return np.array(res), token_count @@ -162,7 +174,10 @@ class XInferenceRerank(Base): "documents": texts } res = requests.post(self.base_url, headers=self.headers, json=data).json() - return np.array([d["relevance_score"] for d in res["results"]]), res["meta"]["tokens"]["input_tokens"]+res["meta"]["tokens"]["output_tokens"] + rank = np.zeros(len(texts), dtype=float) + for d in res["results"]: + rank[d["index"]] = d["relevance_score"] + return rank, res["meta"]["tokens"]["input_tokens"] + res["meta"]["tokens"]["output_tokens"] class LocalAIRerank(Base): @@ -175,7 +190,7 @@ class LocalAIRerank(Base): class NvidiaRerank(Base): def __init__( - self, key, model_name, base_url="https://ai.api.nvidia.com/v1/retrieval/nvidia/" + self, key, model_name, base_url="https://ai.api.nvidia.com/v1/retrieval/nvidia/" ): if not base_url: base_url = "https://ai.api.nvidia.com/v1/retrieval/nvidia/" @@ -208,9 +223,10 @@ class NvidiaRerank(Base): "top_n": len(texts), } res = requests.post(self.base_url, headers=self.headers, json=data).json() - rank = np.array([d["logit"] for d in res["rankings"]]) - indexs = [d["index"] for d in res["rankings"]] - return rank[indexs], token_count + rank = np.zeros(len(texts), dtype=float) + for d in res["rankings"]: + rank[d["index"]] = d["logit"] + return rank, token_count class LmStudioRerank(Base): @@ -247,9 +263,10 @@ class CoHereRerank(Base): top_n=len(texts), return_documents=False, ) - rank = np.array([d.relevance_score for d in res.results]) - indexs = [d.index for d in res.results] - return rank[indexs], token_count + rank = np.zeros(len(texts), dtype=float) + for d in res.results: + rank[d.index] = d.relevance_score + return rank, token_count class TogetherAIRerank(Base): @@ -262,7 +279,7 @@ class TogetherAIRerank(Base): class SILICONFLOWRerank(Base): def __init__( - self, key, model_name, base_url="https://api.siliconflow.cn/v1/rerank" + self, 
key, model_name, base_url="https://api.siliconflow.cn/v1/rerank" ): if not base_url: base_url = "https://api.siliconflow.cn/v1/rerank" @@ -287,10 +304,11 @@ class SILICONFLOWRerank(Base): response = requests.post( self.base_url, json=payload, headers=self.headers ).json() - rank = np.array([d["relevance_score"] for d in response["results"]]) - indexs = [d["index"] for d in response["results"]] + rank = np.zeros(len(texts), dtype=float) + for d in response["results"]: + rank[d["index"]] = d["relevance_score"] return ( - rank[indexs], + rank, response["meta"]["tokens"]["input_tokens"] + response["meta"]["tokens"]["output_tokens"], ) @@ -312,9 +330,10 @@ class BaiduYiyanRerank(Base): documents=texts, top_n=len(texts), ).body - rank = np.array([d["relevance_score"] for d in res["results"]]) - indexs = [d["index"] for d in res["results"]] - return rank[indexs], res["usage"]["total_tokens"] + rank = np.zeros(len(texts), dtype=float) + for d in res["results"]: + rank[d["index"]] = d["relevance_score"] + return rank, res["usage"]["total_tokens"] class VoyageRerank(Base): @@ -328,6 +347,7 @@ class VoyageRerank(Base): res = self.client.rerank( query=query, documents=texts, model=self.model_name, top_k=len(texts) ) - rank = np.array([r.relevance_score for r in res.results]) - indexs = [r.index for r in res.results] - return rank[indexs], res.total_tokens + rank = np.zeros(len(texts), dtype=float) + for r in res.results: + rank[r.index] = r.relevance_score + return rank, res.total_tokens diff --git a/sdk/python/ragflow/modules/assistant.py b/sdk/python/ragflow/modules/assistant.py index 7ba630135..8be68d1ea 100644 --- a/sdk/python/ragflow/modules/assistant.py +++ b/sdk/python/ragflow/modules/assistant.py @@ -76,7 +76,7 @@ class Assistant(Base): raise Exception(res["retmsg"]) def get_session(self, id) -> Session: - res = self.get("/session/get", {"id": id}) + res = self.get("/session/get", {"id": id,"assistant_id":self.id}) res = res.json() if res.get("retmsg") == "success": 
return Session(self.rag, res["data"]) diff --git a/sdk/python/ragflow/modules/session.py b/sdk/python/ragflow/modules/session.py index 4958c0c18..4e109aa94 100644 --- a/sdk/python/ragflow/modules/session.py +++ b/sdk/python/ragflow/modules/session.py @@ -16,9 +16,12 @@ class Session(Base): if "reference" in message: message.pop("reference") res = self.post("/session/completion", - {"id": self.id, "question": question, "stream": stream}, stream=True) + {"session_id": self.id, "question": question, "stream": True}, stream=stream) for line in res.iter_lines(): line = line.decode("utf-8") + if line.startswith("{"): + json_data = json.loads(line) + raise Exception(json_data["retmsg"]) if line.startswith("data:"): json_data = json.loads(line[5:]) if json_data["data"] != True: @@ -69,6 +72,7 @@ class Message(Base): self.reference = None self.role = "assistant" self.prompt = None + self.id = None super().__init__(rag, res_dict) @@ -76,10 +80,10 @@ class Chunk(Base): def __init__(self, rag, res_dict): self.id = None self.content = None - self.document_id = None - self.document_name = None - self.knowledgebase_id = None - self.image_id = None + self.document_id = "" + self.document_name = "" + self.knowledgebase_id = "" + self.image_id = "" self.similarity = None self.vector_similarity = None self.term_similarity = None diff --git a/sdk/python/test/t_session.py b/sdk/python/test/t_session.py index 3fc210ffa..59d27ed81 100644 --- a/sdk/python/test/t_session.py +++ b/sdk/python/test/t_session.py @@ -19,7 +19,7 @@ class TestSession: question = "What is AI" for ans in session.chat(question, stream=True): pass - assert ans.content!="\n**ERROR**", "Please check this error." + assert not ans.content.startswith("**ERROR**"), "Please check this error." def test_delete_session_with_success(self): rag = RAGFlow(API_KEY, HOST_ADDRESS)