fix: Azure AI Foundry model cannot be used in the workflow (#13323)
Signed-off-by: -LAN- <laipz8200@outlook.com>
parent 3f42fabff8
commit f9515901cc
@@ -1,5 +1,5 @@
 from decimal import Decimal
-from enum import Enum
+from enum import StrEnum
 from typing import Optional

 from pydantic import BaseModel
@@ -8,7 +8,7 @@ from core.model_runtime.entities.message_entities import AssistantPromptMessage,
 from core.model_runtime.entities.model_entities import ModelUsage, PriceInfo


-class LLMMode(Enum):
+class LLMMode(StrEnum):
     """
     Enum class for large language model mode.
     """
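The first two hunks switch LLMMode from a plain Enum to StrEnum (Python 3.11+), whose members are real str instances, so comparisons and serialisation against bare mode strings behave the way the workflow code expects. A minimal sketch of the behavioural difference, assuming the member values mirror LLMMode's ("chat" / "completion"):

from enum import Enum, StrEnum

# Illustrative only: member values are assumed to mirror Dify's LLMMode.
class PlainMode(Enum):
    CHAT = "chat"
    COMPLETION = "completion"

class StrMode(StrEnum):
    CHAT = "chat"
    COMPLETION = "completion"

# A plain Enum member is not equal to its string value and formats as the
# member name, which breaks lookups that pass a bare "chat"/"completion".
assert PlainMode.CHAT != "chat"
assert f"{PlainMode.CHAT}" == "PlainMode.CHAT"

# A StrEnum member *is* the string, so equality checks, dict keys and
# f-string/JSON serialisation all see the plain value.
assert StrMode.CHAT == "chat"
assert f"{StrMode.CHAT}" == "chat"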
@@ -4,6 +4,7 @@ from typing import Any, Optional
 from pydantic import BaseModel, Field, field_validator

 from core.model_runtime.entities import ImagePromptMessageContent
+from core.model_runtime.entities.llm_entities import LLMMode
 from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig
 from core.workflow.entities.variable_entities import VariableSelector
 from core.workflow.nodes.base import BaseNodeData
@@ -12,7 +13,7 @@ from core.workflow.nodes.base import BaseNodeData
 class ModelConfig(BaseModel):
     provider: str
     name: str
-    mode: str
+    mode: LLMMode = LLMMode.COMPLETION
     completion_params: dict[str, Any] = {}


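The remaining hunks type ModelConfig.mode as LLMMode with a COMPLETION default instead of a bare str, so pydantic validates and coerces whatever mode string the node receives and falls back to a sane default when it is missing. A self-contained sketch of that behaviour (the provider and model names below are placeholders, and the CHAT member is assumed):

from enum import StrEnum
from typing import Any

from pydantic import BaseModel


class LLMMode(StrEnum):
    CHAT = "chat"              # assumed member
    COMPLETION = "completion"  # referenced by the diff


class ModelConfig(BaseModel):
    provider: str
    name: str
    mode: LLMMode = LLMMode.COMPLETION  # previously `mode: str`
    completion_params: dict[str, Any] = {}


# A bare string is coerced into the enum member...
cfg = ModelConfig(provider="azure_ai_studio", name="gpt-4o", mode="chat")
assert cfg.mode is LLMMode.CHAT

# ...and a config without an explicit mode falls back to COMPLETION
# instead of carrying an untyped string downstream.
cfg = ModelConfig(provider="azure_ai_studio", name="gpt-4o")
assert cfg.mode is LLMMode.COMPLETION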