mirror of
https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-08-14 04:15:53 +08:00
support chat model in huggingface (#2802)
### What problem does this PR solve?

Resolves #2794.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
parent
1bae479b37
commit
3f065c75da
@ -107,6 +107,7 @@ ChatModel = {
|
||||
"BaiduYiyan": BaiduYiyanChat,
|
||||
"Anthropic": AnthropicChat,
|
||||
"Google Cloud": GoogleChat,
|
||||
"HuggingFace": HuggingFaceChat,
|
||||
}
|
||||
|
||||
RerankModel = {
|
||||
|
@ -104,7 +104,13 @@ class XinferenceChat(Base):
|
||||
if base_url.split("/")[-1] != "v1":
|
||||
base_url = os.path.join(base_url, "v1")
|
||||
super().__init__(key, model_name, base_url)
|
||||
|
||||
class HuggingFaceChat(Base):
    """Chat client for a locally hosted HuggingFace inference endpoint.

    The HuggingFace TGI / inference server exposes an OpenAI-compatible API
    under the ``/v1`` prefix, so this wrapper only validates and normalizes
    the base URL before delegating everything else to ``Base``.
    """

    def __init__(self, key=None, model_name="", base_url=""):
        # A local endpoint URL is mandatory; there is no hosted default.
        if not base_url:
            raise ValueError("Local llm url cannot be None")
        # Append the "/v1" API prefix if the caller did not include it.
        # Plain string handling is used instead of os.path.join, which would
        # insert the platform path separator (a backslash on Windows) into
        # the URL and break the request.
        if base_url.split("/")[-1] != "v1":
            base_url = base_url.rstrip("/") + "/v1"
        super().__init__(key, model_name, base_url)
|
||||
|
||||
class DeepSeekChat(Base):
|
||||
def __init__(self, key, model_name="deepseek-chat", base_url="https://api.deepseek.com/v1"):
|
||||
|
@ -54,7 +54,10 @@ const OllamaModal = ({
|
||||
llmFactoryToUrlMap[llmFactory as LlmFactory] ||
|
||||
'https://github.com/infiniflow/ragflow/blob/main/docs/guides/deploy_local_llm.mdx';
|
||||
const optionsMap = {
|
||||
HuggingFace: [{ value: 'embedding', label: 'embedding' }],
|
||||
HuggingFace: [
|
||||
{ value: 'embedding', label: 'embedding' },
|
||||
{ value: 'chat', label: 'chat' },
|
||||
],
|
||||
Xinference: [
|
||||
{ value: 'chat', label: 'chat' },
|
||||
{ value: 'embedding', label: 'embedding' },
|
||||
|
Loading…
x
Reference in New Issue
Block a user