forked from phoenix-oss/llama-stack-mirror
Fix telemetry (#787)
# What does this PR do? This PR fixes a couple of issues with telemetry: 1) The REST refactor changed the method name from get_span_tree to query_span_tree, which was causing the server side to return empty spans. 2) The library client introduced a new event loop, which required changing the location where start_trace and end_trace are called. ## Test Plan LLAMA_STACK_CONFIG="/Users/dineshyv/.llama/distributions/llamastack-fireworks/fireworks-run.yaml" pytest -v tests/client-sdk/agents/test_agents.py -k "test_builtin_tool_web_search" Also verified by querying for spans from the agent run using the library client.
This commit is contained in:
parent
17fd2d2fd0
commit
05f6b44da7
2 changed files with 15 additions and 8 deletions
|
@ -269,7 +269,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
|
||||||
set_request_provider_data(
|
set_request_provider_data(
|
||||||
{"X-LlamaStack-Provider-Data": json.dumps(self.provider_data)}
|
{"X-LlamaStack-Provider-Data": json.dumps(self.provider_data)}
|
||||||
)
|
)
|
||||||
await start_trace(options.url, {"__location__": "library_client"})
|
|
||||||
if stream:
|
if stream:
|
||||||
response = await self._call_streaming(
|
response = await self._call_streaming(
|
||||||
cast_to=cast_to,
|
cast_to=cast_to,
|
||||||
|
@ -281,7 +281,6 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
|
||||||
cast_to=cast_to,
|
cast_to=cast_to,
|
||||||
options=options,
|
options=options,
|
||||||
)
|
)
|
||||||
await end_trace()
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
def _find_matching_endpoint(self, method: str, path: str) -> tuple[Any, dict]:
|
def _find_matching_endpoint(self, method: str, path: str) -> tuple[Any, dict]:
|
||||||
|
@ -323,7 +322,11 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
|
||||||
matched_func, path_params = self._find_matching_endpoint(options.method, path)
|
matched_func, path_params = self._find_matching_endpoint(options.method, path)
|
||||||
body |= path_params
|
body |= path_params
|
||||||
body = self._convert_body(path, options.method, body)
|
body = self._convert_body(path, options.method, body)
|
||||||
|
await start_trace(options.url, {"__location__": "library_client"})
|
||||||
|
try:
|
||||||
result = await matched_func(**body)
|
result = await matched_func(**body)
|
||||||
|
finally:
|
||||||
|
await end_trace()
|
||||||
|
|
||||||
json_content = json.dumps(convert_pydantic_to_json_value(result))
|
json_content = json.dumps(convert_pydantic_to_json_value(result))
|
||||||
mock_response = httpx.Response(
|
mock_response = httpx.Response(
|
||||||
|
@ -366,10 +369,14 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
|
||||||
body = self._convert_body(path, options.method, body)
|
body = self._convert_body(path, options.method, body)
|
||||||
|
|
||||||
async def gen():
|
async def gen():
|
||||||
|
await start_trace(options.url, {"__location__": "library_client"})
|
||||||
|
try:
|
||||||
async for chunk in await func(**body):
|
async for chunk in await func(**body):
|
||||||
data = json.dumps(convert_pydantic_to_json_value(chunk))
|
data = json.dumps(convert_pydantic_to_json_value(chunk))
|
||||||
sse_event = f"data: {data}\n\n"
|
sse_event = f"data: {data}\n\n"
|
||||||
yield sse_event.encode("utf-8")
|
yield sse_event.encode("utf-8")
|
||||||
|
finally:
|
||||||
|
await end_trace()
|
||||||
|
|
||||||
mock_response = httpx.Response(
|
mock_response = httpx.Response(
|
||||||
status_code=httpx.codes.OK,
|
status_code=httpx.codes.OK,
|
||||||
|
|
|
@ -53,7 +53,7 @@ class TelemetryDatasetMixin:
|
||||||
spans = []
|
spans = []
|
||||||
|
|
||||||
for trace in traces:
|
for trace in traces:
|
||||||
spans_by_id = await self.get_span_tree(
|
spans_by_id = await self.query_span_tree(
|
||||||
span_id=trace.root_span_id,
|
span_id=trace.root_span_id,
|
||||||
attributes_to_return=attributes_to_return,
|
attributes_to_return=attributes_to_return,
|
||||||
max_depth=max_depth,
|
max_depth=max_depth,
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue