dify/api/core/llm/streamable_open_ai.py

from typing import List, Optional

from langchain import OpenAI
from langchain.schema import LLMResult

from core.llm.error_handle_wraps import handle_llm_exceptions, handle_llm_exceptions_async

class StreamableOpenAI(OpenAI):
    """OpenAI completion model wrapper whose generate calls are routed
    through the handle_llm_exceptions decorators for error normalization."""

    @handle_llm_exceptions
    def generate(
        self, prompts: List[str], stop: Optional[List[str]] = None
    ) -> LLMResult:
        return super().generate(prompts, stop)

    @handle_llm_exceptions_async
    async def agenerate(
        self, prompts: List[str], stop: Optional[List[str]] = None
    ) -> LLMResult:
        return await super().agenerate(prompts, stop)
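
# A minimal usage sketch, not part of the original file. It assumes valid
# OpenAI credentials are available via the OPENAI_API_KEY environment
# variable; the temperature setting and prompt below are illustrative only.
#
#   llm = StreamableOpenAI(temperature=0)
#   result = llm.generate(["Say hello in one word."])
#   print(result.generations[0][0].text)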