Fix batch size error for Qianwen (DashScope) embedding (#2431)

### What problem does this PR solve?

#2402

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
Kevin Hu 2024-09-14 14:40:57 +08:00 committed by GitHub
parent d3262ca378
commit 3044cb85fd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -155,6 +155,7 @@ class QWenEmbed(Base):
def encode(self, texts: list, batch_size=10):
import dashscope
batch_size = min(batch_size, 4)
try:
res = []
token_count = 0