From 3044cb85fd8256b219561c60ecb3ebf315e27544 Mon Sep 17 00:00:00 2001
From: Kevin Hu
Date: Sat, 14 Sep 2024 14:40:57 +0800
Subject: [PATCH] fix batch size error for qianwen embedding (#2431)

### What problem does this PR solve?

#2402

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 rag/llm/embedding_model.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/rag/llm/embedding_model.py b/rag/llm/embedding_model.py
index fac954da7..221fea198 100644
--- a/rag/llm/embedding_model.py
+++ b/rag/llm/embedding_model.py
@@ -155,6 +155,7 @@ class QWenEmbed(Base):
 
     def encode(self, texts: list, batch_size=10):
         import dashscope
+        batch_size = min(batch_size, 4)
         try:
             res = []
             token_count = 0
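
Context for reviewers: the one-line change caps the number of texts sent per DashScope request at 4, presumably so each call stays within what the embedding endpoint accepts. Below is a hedged, self-contained sketch of how such a cap typically interacts with the batching loop in an `encode` routine. Only `batch_size = min(batch_size, 4)` comes from the diff; the standalone function, the `model_name` default, the `text_type` argument, and the response handling are assumptions based on DashScope's documented text-embedding API, not the repository's actual method body.

```python
import numpy as np
import dashscope


def encode(texts: list, model_name: str = "text_embedding_v2", batch_size: int = 10):
    # Cap the per-request batch at 4 texts (the change made in this patch).
    batch_size = min(batch_size, 4)
    res, token_count = [], 0
    for i in range(0, len(texts), batch_size):
        # One embedding request per chunk of at most `batch_size` texts.
        resp = dashscope.TextEmbedding.call(
            model=model_name,
            input=texts[i:i + batch_size],
            text_type="document",
        )
        # Embeddings are returned with a text_index, so restore input order
        # before appending. Field names follow DashScope's documented output
        # and should be verified against the SDK version in use.
        embds = [[] for _ in range(len(resp["output"]["embeddings"]))]
        for e in resp["output"]["embeddings"]:
            embds[e["text_index"]] = e["embedding"]
        res.extend(embds)
        token_count += resp["usage"]["total_tokens"]
    return np.array(res), token_count
```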