From 9457d20ef1e4c85adc48783246adf32aaaa18deb Mon Sep 17 00:00:00 2001
From: Kevin Hu
Date: Fri, 25 Oct 2024 10:50:44 +0800
Subject: [PATCH] make gemini robust (#3012)

### What problem does this PR solve?

#3003

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 rag/llm/chat_model.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py
index d6589e0ab..8e9dd8924 100644
--- a/rag/llm/chat_model.py
+++ b/rag/llm/chat_model.py
@@ -780,10 +780,11 @@ class GeminiChat(Base):
                 ans += resp.text
                 yield ans
 
+            yield response._chunks[-1].usage_metadata.total_token_count
         except Exception as e:
             yield ans + "\n**ERROR**: " + str(e)
 
-        yield response._chunks[-1].usage_metadata.total_token_count
+        yield 0
 
 
 class GroqChat:
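
For context, a minimal sketch of the streaming control flow this patch produces. It is not the repo's code: the stub chunk list stands in for the google-generativeai streaming response, and `chat_streamly_sketch` / `read_stream` and the word-count token estimate are hypothetical stand-ins.

```python
def chat_streamly_sketch(chunks, fail=False):
    """Mirrors GeminiChat.chat_streamly after the patch: the real token count
    is yielded inside the try block, so a failed request no longer triggers a
    second exception by touching response internals after the except clause."""
    ans = ""
    try:
        for text in chunks:
            if fail:
                raise RuntimeError("simulated Gemini failure")
            ans += text
            yield ans                          # growing partial answer (str)
        yield len(ans.split())                 # stand-in for usage_metadata.total_token_count (int)
    except Exception as e:
        yield ans + "\n**ERROR**: " + str(e)   # surface the error as text, like the original
    yield 0                                    # reached if the generator is drained; fallback count on the error path


def read_stream(gen):
    """Hypothetical consumer: strings are partial answers, the first int is
    taken as the token count."""
    answer = ""
    for item in gen:
        if isinstance(item, int):
            return answer, item
        answer = item
    return answer, 0


if __name__ == "__main__":
    print(read_stream(chat_streamly_sketch(["Hello ", "world"])))             # ('Hello world', 2)
    print(read_stream(chat_streamly_sketch(["Hello ", "world"], fail=True)))  # ('\n**ERROR**: simulated Gemini failure', 0)
```

On the success path the real count reaches the caller before the trailing `yield 0`; on failure the caller still gets the error string followed by a zero count, instead of the old behaviour where `response._chunks[-1]` was read outside the try and could raise again after the request had already failed.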