refactor: Remove unused code in large_language_model.py (#5433)

This commit is contained in:
-LAN- 2024-06-20 16:20:40 +08:00 committed by GitHub
parent 39c14ec7c1
commit 142dc0afd7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 0 additions and 48 deletions

View File

@@ -489,37 +489,6 @@ if you are not sure about the structure.
"""Cut off the text as soon as any stop words occur."""
return re.split("|".join(stop), text, maxsplit=1)[0]
def _llm_result_to_stream(self, result: LLMResult) -> Generator:
    """
    Convert a complete (non-streaming) LLM result into a simulated stream.

    Yields one LLMResultChunk per character of the message content, so a
    blocking result can be consumed through the streaming interface.

    :param result: the full LLM result to replay as a stream
    :return: generator of LLMResultChunk objects
    """
    tool_calls = result.message.tool_calls
    content = result.message.content
    last_index = len(content) - 1

    for position, char in enumerate(content):
        # Tool calls are attached only to the final chunk; earlier
        # chunks carry an empty list.
        chunk_message = AssistantPromptMessage(
            content=char,
            tool_calls=tool_calls if position == last_index else []
        )
        yield LLMResultChunk(
            model=result.model,
            prompt_messages=result.prompt_messages,
            system_fingerprint=result.system_fingerprint,
            delta=LLMResultChunkDelta(
                index=position,
                message=chunk_message,
            )
        )
        # Small delay between chunks to mimic real streaming pacing.
        time.sleep(0.01)
def get_parameter_rules(self, model: str, credentials: dict) -> list[ParameterRule]:
"""
Get parameter rules

View File

@@ -156,11 +156,6 @@ def test_invoke_chat_model(setup_openai_mock):
assert isinstance(result, LLMResult)
assert len(result.message.content) > 0
for chunk in model._llm_result_to_stream(result):
assert isinstance(chunk, LLMResultChunk)
assert isinstance(chunk.delta, LLMResultChunkDelta)
assert isinstance(chunk.delta.message, AssistantPromptMessage)
assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
@pytest.mark.parametrize('setup_openai_mock', [['chat']], indirect=True)
def test_invoke_stream_chat_model(setup_openai_mock):

View File

@@ -136,12 +136,6 @@ def test_invoke_chat_model():
assert isinstance(result, LLMResult)
assert len(result.message.content) > 0
for chunk in model._llm_result_to_stream(result):
assert isinstance(chunk, LLMResultChunk)
assert isinstance(chunk.delta, LLMResultChunkDelta)
assert isinstance(chunk.delta.message, AssistantPromptMessage)
assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
def test_invoke_stream_chat_model():
model = CohereLargeLanguageModel()

View File

@@ -156,12 +156,6 @@ def test_invoke_chat_model(setup_openai_mock):
assert isinstance(result, LLMResult)
assert len(result.message.content) > 0
for chunk in model._llm_result_to_stream(result):
assert isinstance(chunk, LLMResultChunk)
assert isinstance(chunk.delta, LLMResultChunkDelta)
assert isinstance(chunk.delta.message, AssistantPromptMessage)
assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
@pytest.mark.parametrize('setup_openai_mock', [['chat']], indirect=True)
def test_invoke_chat_model_with_vision(setup_openai_mock):
model = OpenAILargeLanguageModel()