From 2ab8bc679fb3f558c989745965b5299d229a2ef9 Mon Sep 17 00:00:00 2001
From: aiscrm <30600079+aiscrm@users.noreply.github.com>
Date: Mon, 7 Oct 2024 18:03:30 +0800
Subject: [PATCH] fix: Missing model information in llm span of Langfuse #9029 (#9030)

Co-authored-by: corel
---
 api/core/ops/langfuse_trace/langfuse_trace.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py
index 6aefbec9aa..171e34f8cb 100644
--- a/api/core/ops/langfuse_trace/langfuse_trace.py
+++ b/api/core/ops/langfuse_trace/langfuse_trace.py
@@ -159,6 +159,16 @@ class LangFuseDataTrace(BaseTraceInstance):
                     "status": status,
                 }
             )
+            process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+            model_provider = process_data.get("model_provider", None)
+            model_name = process_data.get("model_name", None)
+            if model_provider is not None and model_name is not None:
+                metadata.update(
+                    {
+                        "model_provider": model_provider,
+                        "model_name": model_name,
+                    }
+                )
 
             # add span
             if trace_info.message_id:
@@ -191,7 +201,6 @@ class LangFuseDataTrace(BaseTraceInstance):
 
                 self.add_span(langfuse_span_data=span_data)
 
-            process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
             if process_data and process_data.get("model_mode") == "chat":
                 total_token = metadata.get("total_tokens", 0)
                 # add generation
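
Note: the following is a standalone sketch of the behaviour this patch adds, for illustration only. It assumes a JSON-encoded process_data string like the one a Dify workflow node execution carries; the function name enrich_metadata_with_model_info and the example provider/model values are hypothetical and not part of Dify's API.

    # Minimal sketch, assuming process_data is a JSON string (or None) as in
    # node_execution.process_data. Names and example values are illustrative.
    import json
    from typing import Any, Optional


    def enrich_metadata_with_model_info(metadata: dict[str, Any], process_data_json: Optional[str]) -> dict[str, Any]:
        # Parse the node execution's process_data, falling back to an empty dict.
        process_data = json.loads(process_data_json) if process_data_json else {}
        model_provider = process_data.get("model_provider")
        model_name = process_data.get("model_name")
        # Only annotate the span metadata when both fields are present,
        # mirroring the guard added in langfuse_trace.py by this patch.
        if model_provider is not None and model_name is not None:
            metadata.update({"model_provider": model_provider, "model_name": model_name})
        return metadata


    if __name__ == "__main__":
        demo = enrich_metadata_with_model_info(
            {"status": "succeeded"},
            json.dumps({"model_mode": "chat", "model_provider": "openai", "model_name": "gpt-4o"}),
        )
        print(demo)  # {'status': 'succeeded', 'model_provider': 'openai', 'model_name': 'gpt-4o'}

The patch also moves the process_data parsing ahead of the span creation, so the same parsed dict is reused both for the metadata enrichment above and for the existing model_mode == "chat" generation handling.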