Fix telemetry (#787)

# What does this PR do?

This PR fixes a couple of issues with telemetry:
1) The REST refactor renamed the method from get_span_tree to
query_span_tree, and the stale call in TelemetryDatasetMixin was causing the
server side to return empty spans
2) The library client introduced a new event loop, which required moving
where start_trace and end_trace are called (see the sketch below)
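
A minimal runnable sketch of that pattern is below: tracing now wraps each individual call on the event loop that actually executes it, rather than the outer request() method. Only the start_trace/end_trace names and the try/finally shape mirror the real change; the stub trace functions, call_with_trace, and matched_func are hypothetical scaffolding.

```python
import asyncio


# Hypothetical stand-ins for the real start_trace/end_trace helpers, so the
# sketch runs on its own; in llama-stack these come from the tracing utilities.
async def start_trace(name, attributes=None):
    print(f"start trace: {name} {attributes}")


async def end_trace():
    print("end trace")


async def call_with_trace(matched_func, body, url):
    # The pattern from this PR: start the trace inside the coroutine that the
    # library client's event loop actually runs, and always end it, even if
    # the underlying call raises.
    await start_trace(url, {"__location__": "library_client"})
    try:
        return await matched_func(**body)
    finally:
        await end_trace()


async def main():
    async def matched_func(**kwargs):
        return {"ok": True, **kwargs}

    print(await call_with_trace(matched_func, {"x": 1}, "/v1/example"))


if __name__ == "__main__":
    asyncio.run(main())
```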


## Test Plan

LLAMA_STACK_CONFIG="/Users/dineshyv/.llama/distributions/llamastack-fireworks/fireworks-run.yaml"
pytest -v tests/client-sdk/agents/test_agents.py -k
"test_builtin_tool_web_search"


Also verified by querying spans from the agent run through the library client (rough sketch below).
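
In this sketch, the query_span_tree call and its span_id / attributes_to_return / max_depth parameters mirror the TelemetryDatasetMixin change in the diff; the import path, initialize(), and the query_traces listing call are assumptions about the client surface rather than something this PR defines.

```python
# Hypothetical span-query sketch. Only query_span_tree and its parameters are
# taken from the diff; the client setup and query_traces call are assumptions.
import asyncio

from llama_stack.distribution.library_client import AsyncLlamaStackAsLibraryClient


async def dump_agent_spans(run_config: str) -> None:
    client = AsyncLlamaStackAsLibraryClient(run_config)
    await client.initialize()

    # Assumed listing call: fetch recent traces, then walk each span tree.
    traces = await client.telemetry.query_traces()
    for trace in traces:
        spans_by_id = await client.telemetry.query_span_tree(
            span_id=trace.root_span_id,
            attributes_to_return=["input", "output"],
            max_depth=5,
        )
        # spans_by_id is assumed to map span_id -> span, per its name in the mixin.
        for span in spans_by_id.values():
            print(span)


if __name__ == "__main__":
    asyncio.run(dump_agent_spans("fireworks-run.yaml"))
```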
Author: Dinesh Yeduguru (committed by GitHub), 2025-01-16 10:36:13 -08:00
Commit: 05f6b44da7 (parent 17fd2d2fd0)
2 changed files with 15 additions and 8 deletions


@@ -269,7 +269,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             set_request_provider_data(
                 {"X-LlamaStack-Provider-Data": json.dumps(self.provider_data)}
             )
-        await start_trace(options.url, {"__location__": "library_client"})
+
         if stream:
             response = await self._call_streaming(
                 cast_to=cast_to,
@@ -281,7 +281,6 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
                 cast_to=cast_to,
                 options=options,
             )
-        await end_trace()
         return response

     def _find_matching_endpoint(self, method: str, path: str) -> tuple[Any, dict]:
@@ -323,7 +322,11 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
         matched_func, path_params = self._find_matching_endpoint(options.method, path)
         body |= path_params
         body = self._convert_body(path, options.method, body)
-        result = await matched_func(**body)
+        await start_trace(options.url, {"__location__": "library_client"})
+        try:
+            result = await matched_func(**body)
+        finally:
+            await end_trace()
         json_content = json.dumps(convert_pydantic_to_json_value(result))

         mock_response = httpx.Response(
@@ -366,10 +369,14 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
         body = self._convert_body(path, options.method, body)

         async def gen():
-            async for chunk in await func(**body):
-                data = json.dumps(convert_pydantic_to_json_value(chunk))
-                sse_event = f"data: {data}\n\n"
-                yield sse_event.encode("utf-8")
+            await start_trace(options.url, {"__location__": "library_client"})
+            try:
+                async for chunk in await func(**body):
+                    data = json.dumps(convert_pydantic_to_json_value(chunk))
+                    sse_event = f"data: {data}\n\n"
+                    yield sse_event.encode("utf-8")
+            finally:
+                await end_trace()

         mock_response = httpx.Response(
             status_code=httpx.codes.OK,
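
One note on the gen() change above: putting end_trace in a finally block means the trace is closed on normal completion, on errors, and when the consumer stops reading the stream early. A standalone demonstration of that Python behavior (not code from this PR, using a fake end_trace) is:

```python
import asyncio


async def fake_end_trace():
    # Stand-in for `await end_trace()` in the library client's gen().
    print("end_trace ran")


async def stream():
    try:
        for i in range(5):
            yield i
    finally:
        # Runs on normal completion, on errors, and when the generator is closed.
        await fake_end_trace()


async def main():
    agen = stream()
    async for chunk in agen:
        print("chunk:", chunk)
        if chunk == 1:
            break           # consumer stops early
    await agen.aclose()     # closing the generator still triggers the finally


asyncio.run(main())
```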


@@ -53,7 +53,7 @@ class TelemetryDatasetMixin:
         spans = []
         for trace in traces:
-            spans_by_id = await self.get_span_tree(
+            spans_by_id = await self.query_span_tree(
                 span_id=trace.root_span_id,
                 attributes_to_return=attributes_to_return,
                 max_depth=max_depth,