From ee2d4a0b897ef9a26698186c81acfedaa6f3f004 Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Tue, 14 Jan 2025 10:39:44 -0800
Subject: [PATCH] correct event loop management

---
 llama_stack/distribution/library_client.py | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)

diff --git a/llama_stack/distribution/library_client.py b/llama_stack/distribution/library_client.py
index e579b22ca..9a565bd94 100644
--- a/llama_stack/distribution/library_client.py
+++ b/llama_stack/distribution/library_client.py
@@ -144,15 +144,12 @@ class LlamaStackAsLibraryClient(LlamaStackClient):
             print(f"Removed handler {handler.__class__.__name__} from root logger")
 
     def request(self, *args, **kwargs):
-        loop = asyncio.new_event_loop()
-        asyncio.set_event_loop(loop)
-        async_response = loop.run_until_complete(
-            self.async_client.request(*args, **kwargs)
-        )
+        async_response = asyncio.run(self.async_client.request(*args, **kwargs))
+
         if kwargs.get("stream"):
-            # NOTE: We are using AsyncLlamaStackClient under the hood
-            # We need to convert the AsyncStream from async client into
-            # SyncStream return type for streaming
+            loop = asyncio.new_event_loop()
+            asyncio.set_event_loop(loop)
+
             def sync_generator():
                 try:
                     while True:
@@ -165,7 +162,6 @@ class LlamaStackAsLibraryClient(LlamaStackClient):
 
             return sync_generator()
         else:
-            loop.close()
             return async_response
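
Note (reviewer sketch): the patch bridges the synchronous request() API onto the async client in two ways: one-shot calls go through asyncio.run(), which creates and tears down its own event loop, while the streaming path keeps a dedicated loop alive so a plain generator can pull chunks out of the async stream. The snippet below is a minimal, self-contained illustration of that pattern using hypothetical stand-in names (fake_async_request, sync_request); it is not the llama-stack code, and for clarity it acquires the async stream on the same dedicated loop that later drives it.

# Minimal sketch of the sync-over-async bridge used above (hypothetical names,
# not the llama-stack implementation).
import asyncio
from typing import AsyncIterator, Iterator


async def fake_async_request(stream: bool = False):
    # Stand-in for an async client call: returns a single value, or an async
    # iterator of chunks when streaming is requested.
    if not stream:
        await asyncio.sleep(0)
        return {"result": "ok"}

    async def chunks() -> AsyncIterator[str]:
        for part in ("hello", "world"):
            await asyncio.sleep(0)
            yield part

    return chunks()


def sync_request(stream: bool = False):
    # Non-streaming path: asyncio.run() creates, runs, and closes its own loop.
    if not stream:
        return asyncio.run(fake_async_request(stream=False))

    # Streaming path: keep a dedicated loop alive so a plain generator can
    # drive the async iterator chunk by chunk.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    def sync_generator() -> Iterator[str]:
        try:
            async_stream = loop.run_until_complete(fake_async_request(stream=True))
            while True:
                # __anext__() returns a coroutine; run it to completion for each chunk.
                yield loop.run_until_complete(async_stream.__anext__())
        except StopAsyncIteration:
            pass
        finally:
            loop.close()

    return sync_generator()


if __name__ == "__main__":
    print(sync_request())                   # {'result': 'ok'}
    print(list(sync_request(stream=True)))  # ['hello', 'world']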