mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-28 02:53:30 +00:00)

await end_trace in libcli

parent bc1fddf1df
commit 7615da78b8

1 changed file with 2 additions and 2 deletions
@@ -253,7 +253,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             body = self._convert_body(path, body)
             return await func(**body)
         finally:
-            end_trace()
+            await end_trace()
 
     async def _call_streaming(self, path: str, method: str, body: dict = None):
         await start_trace(path, {"__location__": "library_client"})
@@ -266,7 +266,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
             async for chunk in await func(**body):
                 yield chunk
         finally:
-            end_trace()
+            await end_trace()
 
     def _convert_body(self, path: str, body: Optional[dict] = None) -> dict:
         if not body:
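Context for the change: in Python, calling an `async def` function without `await` only creates a coroutine object that never runs, so the un-awaited `end_trace()` in these `finally` blocks would never actually close the trace (and Python would emit a "coroutine was never awaited" warning). A minimal, self-contained sketch of the pattern is below; the bodies of `start_trace`/`end_trace`, the `call_endpoint` wrapper, and the example path are hypothetical stand-ins, not the llama-stack implementation.

```python
# Minimal sketch (not the llama-stack implementation) showing why end_trace()
# must be awaited. The trace helpers here are hypothetical async stand-ins.
import asyncio
from typing import Optional


async def start_trace(path: str, attributes: Optional[dict] = None) -> None:
    # Stand-in for opening a span against an async telemetry backend.
    print(f"start trace: {path} {attributes}")


async def end_trace() -> None:
    # Stand-in for closing/flushing the span.
    print("end trace")


async def call_endpoint(path: str) -> str:
    await start_trace(path, {"__location__": "library_client"})
    try:
        return f"result for {path}"
    finally:
        # Without `await`, this call would only create a coroutine object;
        # the span would never close and Python would warn:
        # "RuntimeWarning: coroutine 'end_trace' was never awaited".
        await end_trace()


asyncio.run(call_endpoint("/v1/models"))
```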