This commit is contained in:
Dinesh Yeduguru 2024-11-26 15:41:08 -08:00
parent af8a1fe5b3
commit b3e149334a
3 changed files with 22 additions and 6 deletions

View file

@ -568,7 +568,13 @@ class ChatAgent(ShieldRunnerMixin):
)
)
with tracing.span("tool_execution"):
with tracing.span(
"tool_execution",
{
"tool_name": tool_call.tool_name,
"input": message.model_dump_json(),
},
) as span:
result_messages = await execute_tool_call_maybe(
self.tools_dict,
[message],
@ -577,6 +583,7 @@ class ChatAgent(ShieldRunnerMixin):
len(result_messages) == 1
), "Currently not supporting multiple messages"
result_message = result_messages[0]
span.set_attribute("output", result_message.model_dump_json())
yield AgentTurnResponseStreamChunk(
event=AgentTurnResponseEvent(

View file

@ -5,7 +5,9 @@
# the root directory of this source tree.
import json
from typing import Optional
from typing import List, Optional
from llama_stack.apis.telemetry.telemetry import Trace
from .config import LogFormat
@ -52,6 +54,11 @@ class ConsoleTelemetryImpl(Telemetry):
async def get_trace(self, trace_id: str) -> Trace:
    """Look up a single trace by its id.

    Not supported by the console telemetry backend; always raises
    NotImplementedError.
    """
    raise NotImplementedError()
async def get_traces_for_session(
    self, session_id: str, lookback: str = "1h", limit: int = 100
) -> List[Trace]:
    """Return traces recorded for *session_id*.

    Parameters mirror the Telemetry API: `lookback` is a time-window
    string (default "1h") and `limit` caps the number of traces
    returned — presumably interpreted by concrete backends; this
    console implementation does not support querying and always
    raises NotImplementedError.
    """
    raise NotImplementedError()
COLORS = {
"reset": "\033[0m",