Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-07-26 22:19:49 +00:00.
library client fix since we need a runloop for stack construction which can create forever running background threads
This commit is contained in:
parent
3cda82be3a
commit
487e073378
1 changed file with 7 additions and 1 deletion
|
@@ -161,7 +161,13 @@ class LlamaStackAsLibraryClient(LlamaStackClient):
         if not self.skip_logger_removal:
             self._remove_root_logger_handlers()

-        return self.loop.run_until_complete(self.async_client.initialize())
+        # use a new event loop to avoid interfering with the main event loop
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        try:
+            return loop.run_until_complete(self.async_client.initialize())
+        finally:
+            asyncio.set_event_loop(None)

     def _remove_root_logger_handlers(self):
         """
Loading…
Add table
Add a link
Reference in a new issue