Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-05 18:22:41 +00:00)
add comment
This commit is contained in:
parent ee2d4a0b89
commit a69c1b8bb1
1 changed file with 3 additions and 0 deletions
@@ -147,6 +147,9 @@ class LlamaStackAsLibraryClient(LlamaStackClient):
         async_response = asyncio.run(self.async_client.request(*args, **kwargs))
 
         if kwargs.get("stream"):
+            # NOTE: We are using AsyncLlamaStackClient under the hood
+            # A new event loop is needed to convert the AsyncStream
+            # from async client into SyncStream return type for streaming
             loop = asyncio.new_event_loop()
             asyncio.set_event_loop(loop)
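
The comments added by this commit describe the pattern of driving an async stream on a dedicated event loop so that a synchronous caller can iterate over it. The sketch below illustrates that idea in isolation; it is not the library's actual implementation, and fake_async_stream and to_sync_stream are hypothetical names used only for this example.

import asyncio
from typing import AsyncIterator, Iterator, TypeVar

T = TypeVar("T")


async def fake_async_stream() -> AsyncIterator[str]:
    # Hypothetical stand-in for the AsyncStream produced by the async client.
    for chunk in ("hello", "world"):
        await asyncio.sleep(0)
        yield chunk


def to_sync_stream(async_iter: AsyncIterator[T]) -> Iterator[T]:
    # Create and install a fresh event loop, mirroring the commit's
    # loop = asyncio.new_event_loop() / asyncio.set_event_loop(loop) lines,
    # then pull items from the async iterator one at a time so the caller
    # can consume them with a plain for-loop (AsyncStream -> SyncStream).
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        while True:
            try:
                yield loop.run_until_complete(async_iter.__anext__())
            except StopAsyncIteration:
                break
    finally:
        loop.close()


# Usage: the async generator is consumed synchronously, chunk by chunk.
for chunk in to_sync_stream(fake_async_stream()):
    print(chunk)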