Skip to content

Commit

Permalink
feat: add LLM model info to ops trace (langgenius#7306)
Browse files · Browse the repository at this point in the history
  • Loading branch information
ZhouhaoJiang authored and JunXu01 committed Nov 9, 2024
1 parent 4d4fbc7 commit d97fad0
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 3 deletions.
1 change: 1 addition & 0 deletions api/core/ops/langfuse_trace/langfuse_trace.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,7 @@ def workflow_trace(self, trace_info: WorkflowTraceInfo):
node_generation_data = LangfuseGeneration(
name="llm",
trace_id=trace_id,
model=process_data.get("model_name"),
parent_observation_id=node_execution_id,
start_time=created_at,
end_time=finished_at,
Expand Down
9 changes: 7 additions & 2 deletions api/core/ops/langsmith_trace/langsmith_trace.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,7 @@ def workflow_trace(self, trace_info: WorkflowTraceInfo):
json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {}
)
node_total_tokens = execution_metadata.get("total_tokens", 0)

metadata = json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {}
metadata = execution_metadata.copy()
metadata.update(
{
"workflow_run_id": trace_info.workflow_run_id,
Expand All @@ -156,6 +155,12 @@ def workflow_trace(self, trace_info: WorkflowTraceInfo):
process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
if process_data and process_data.get("model_mode") == "chat":
run_type = LangSmithRunType.llm
metadata.update(
{
'ls_provider': process_data.get('model_provider', ''),
'ls_model_name': process_data.get('model_name', ''),
}
)
elif node_type == "knowledge-retrieval":
run_type = LangSmithRunType.retriever
else:
Expand Down
4 changes: 3 additions & 1 deletion api/core/workflow/nodes/llm/llm_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,9 @@ def _run(self, variable_pool: VariablePool) -> NodeRunResult:
'prompts': PromptMessageUtil.prompt_messages_to_prompt_for_saving(
model_mode=model_config.mode,
prompt_messages=prompt_messages
)
),
'model_provider': model_config.provider,
'model_name': model_config.model,
}

# handle invoke result
Expand Down

0 comments on commit d97fad0

Please sign in to comment.