mirror of
https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-08-12 15:48:59 +08:00
add locally deployed llm (#841)
### What problem does this PR solve?

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
parent
2b36283712
commit
a7bd427116
@ -298,4 +298,19 @@ class LocalLLM(Base):
|
||||
)
|
||||
return ans, num_tokens_from_string(ans)
|
||||
except Exception as e:
|
||||
return "**ERROR**: " + str(e), 0
|
||||
return "**ERROR**: " + str(e), 0
|
||||
|
||||
def chat_streamly(self, system, history, gen_conf):
    """Stream a chat completion from the locally deployed LLM client.

    Yields the accumulated answer text after every received chunk, so each
    yielded value is the full response-so-far (not the delta). After the
    stream ends (or fails), yields the chunk count as a final int so the
    caller can read it as a token-count sentinel.

    :param system: optional system prompt; prepended to the messages when truthy
    :param history: list of {"role": ..., "content": ...} message dicts
    :param gen_conf: generation config forwarded to the client unchanged
    """
    # Build a new message list instead of history.insert(0, ...): the
    # original mutated the caller's list, prepending a system message on
    # every call — a surprising side effect on a shared argument.
    if system:
        history = [{"role": "system", "content": system}] + history
    token_count = 0
    answer = ""
    try:
        for ans in self.client.chat_streamly(history, gen_conf):
            answer += ans
            # NOTE(review): this counts streamed chunks, not actual tokens;
            # kept as-is to preserve the original contract.
            token_count += 1
            yield answer
    except Exception as e:
        # Best-effort: surface the partial answer plus the error text
        # rather than raising out of the generator.
        yield answer + "\n**ERROR**: " + str(e)

    yield token_count
|
||||
|
Loading…
x
Reference in New Issue
Block a user