add locally deployed llm (#841)

### What problem does this PR solve?
Adds a `chat_streamly` method to the `LocalLLM` client so locally deployed
models can stream chat responses chunk by chunk instead of returning a single
completed answer.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
KevinHuSh 2024-05-20 12:40:59 +08:00 committed by GitHub
parent 2b36283712
commit a7bd427116
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -298,4 +298,19 @@ class LocalLLM(Base):
) )
return ans, num_tokens_from_string(ans) return ans, num_tokens_from_string(ans)
except Exception as e: except Exception as e:
return "**ERROR**: " + str(e), 0 return "**ERROR**: " + str(e), 0
def chat_streamly(self, system, history, gen_conf):
    """Stream a chat completion from the locally deployed LLM.

    Yields the accumulated answer text after each received chunk, and
    finally yields the total chunk count as an ``int`` (mirroring the
    ``(answer, token_count)`` contract of the non-streaming ``chat``).

    Args:
        system: Optional system prompt; prepended as a ``system`` role
            message when truthy.
        history: List of ``{"role": ..., "content": ...}`` messages.
            Not mutated — a new list is built when ``system`` is given.
        gen_conf: Generation config forwarded to the underlying client.

    Yields:
        str: the answer accumulated so far, once per streamed chunk;
            on error, the partial answer with an ``**ERROR**`` suffix.
        int: as the final item, the number of chunks received.
            NOTE(review): this counts stream chunks, which may not equal
            actual token usage — confirm against the client's semantics.
    """
    if system:
        # Build a fresh list instead of history.insert(0, ...) so the
        # caller's list is not mutated (repeated calls would otherwise
        # stack duplicate system messages).
        history = [{"role": "system", "content": system}] + history
    token_count = 0
    answer = ""
    try:
        for ans in self.client.chat_streamly(history, gen_conf):
            answer += ans
            token_count += 1
            yield answer
    except Exception as e:
        # Best-effort: surface the error inline with whatever partial
        # answer was already streamed, rather than raising mid-stream.
        yield answer + "\n**ERROR**: " + str(e)
    yield token_count