Mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git (synced 2025-08-14 14:35:55 +08:00)
fix: inner invoke llm token too long (#20391)
commit 9bbd646f40
parent 57ece83c30
@@ -58,6 +58,7 @@ class PluginModelBackwardsInvocation(BaseBackwardsInvocation):
                         LLMNode.deduct_llm_quota(
                             tenant_id=tenant.id, model_instance=model_instance, usage=chunk.delta.usage
                         )
+                    chunk.prompt_messages = []
                     yield chunk
 
             return handle()
@@ -68,7 +69,7 @@ class PluginModelBackwardsInvocation(BaseBackwardsInvocation):
             def handle_non_streaming(response: LLMResult) -> Generator[LLMResultChunk, None, None]:
                 yield LLMResultChunk(
                     model=response.model,
-                    prompt_messages=response.prompt_messages,
+                    prompt_messages=[],
                     system_fingerprint=response.system_fingerprint,
                     delta=LLMResultChunkDelta(
                         index=0,
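
Taken together, the two hunks stop echoing the original prompt back to the caller: the streaming path clears prompt_messages on every chunk before it is yielded, and the non-streaming path yields an empty list instead of response.prompt_messages. With long prompts, re-sending those messages in every chunk is what the "token too long" in the commit title appears to refer to. Below is a minimal, self-contained sketch of the idea; FakeChunk, FakeDelta, and stream_without_prompts are hypothetical stand-ins, not dify's real LLMResultChunk / LLMResultChunkDelta models.

# Self-contained sketch of the idea behind this commit. The classes below are
# simplified stand-ins, not dify's real LLMResult/LLMResultChunk models; they
# exist only to show why dropping prompt_messages shrinks the returned payload.
import json
from collections.abc import Generator, Iterable
from dataclasses import asdict, dataclass


@dataclass
class FakeDelta:  # hypothetical stand-in for LLMResultChunkDelta
    index: int
    message: str


@dataclass
class FakeChunk:  # hypothetical stand-in for LLMResultChunk
    model: str
    prompt_messages: list[str]
    delta: FakeDelta


def stream_without_prompts(chunks: Iterable[FakeChunk]) -> Generator[FakeChunk, None, None]:
    # Mirrors the streaming fix: clear prompt_messages before yielding so each
    # chunk sent back to the plugin no longer re-carries the whole prompt.
    for chunk in chunks:
        chunk.prompt_messages = []  # the one-line change from the first hunk
        yield chunk


if __name__ == "__main__":
    long_prompt = ["x" * 10_000]  # a deliberately long prompt
    raw = [FakeChunk("example-model", list(long_prompt), FakeDelta(i, "hi")) for i in range(5)]

    size_before = sum(len(json.dumps(asdict(c))) for c in raw)
    size_after = sum(len(json.dumps(asdict(c))) for c in stream_without_prompts(raw))
    print(f"serialized payload: {size_before} bytes before the fix, {size_after} bytes after")

The non-streaming hunk applies the same idea in a single step: the one LLMResultChunk it yields carries prompt_messages=[] instead of echoing response.prompt_messages.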