Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-18 00:47:15 +00:00
fix(test): update LlamaStackAsLibraryClient initialization tests after removing initialize method
The recent refactor (3778a4c3) introduced automatic initialization for LlamaStackAsLibraryClient, but the unit tests still expected manual initialization and the _is_initialized flag, which caused test failures. Changes:
- Update test assertions to check that route_impls is not None instead of _is_initialized
- Add proper mocking in tests to avoid external provider dependencies
- Maintain test coverage for the automatic initialization behavior
- Ensure backward-compatibility testing for the deprecated initialize() method

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
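A minimal sketch of the assertion change described above, assuming pytest test discovery; FakeLibraryClient and its route table are illustrative stand-ins, and only the route_impls and _is_initialized names come from the commit message:

class FakeLibraryClient:
    """Illustrative stand-in for the library client (not the real class)."""

    def __init__(self):
        # Assumed behavior after the refactor: the constructor resolves routes
        # itself, so there is no separate _is_initialized flag to inspect.
        self.route_impls = {"/v1/models": lambda: ["llama-3"]}


def test_client_initialized_on_construction():
    client = FakeLibraryClient()
    # Old assertion style: assert client._is_initialized
    # New assertion style: route_impls is populated as soon as the object exists.
    assert client.route_impls is not None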
This commit is contained in:
parent
c54278f3d7
commit
2ebdfced3c
4 changed files with 78 additions and 27 deletions
@@ -149,7 +149,6 @@ class LlamaStackAsLibraryClient(LlamaStackClient):
            config_path_or_distro_name, custom_provider_registry, provider_data, skip_logger_removal
        )
        self.pool_executor = ThreadPoolExecutor(max_workers=4)
        self.skip_logger_removal = skip_logger_removal
        self.provider_data = provider_data

        self.loop = asyncio.new_event_loop()

@@ -247,7 +246,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):

    async def initialize(self) -> bool:
        """
        Initialize the async client. Can be called multiple times safely.
        Initialize the async client.

        Returns:
            bool: True if initialization was successful

@@ -312,6 +311,9 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
        stream=False,
        stream_cls=None,
    ):
        if self.route_impls is None:
            raise ValueError("Client not initialized. Please call initialize() first.")

        # Create headers with provider data if available
        headers = options.headers or {}
        if self.provider_data:
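A rough sketch of the backward-compatibility pattern the commit message refers to (not the repository's actual implementation; the class and route names are assumptions): once the constructor performs initialization, the deprecated initialize() can remain as a no-op so existing callers keep working.

import warnings


class AsyncClientSketch:
    """Hypothetical async client whose constructor does the initialization."""

    def __init__(self):
        # Assumed behavior: routes are resolved eagerly here, so route_impls
        # is never None for a successfully constructed client.
        self.route_impls = {"/v1/models": lambda: ["llama-3"]}

    async def initialize(self) -> bool:
        # Kept only for callers that still invoke initialize() explicitly.
        warnings.warn(
            "initialize() is no longer required; the client initializes automatically.",
            DeprecationWarning,
            stacklevel=2,
        )
        return True

Existing code that still awaits client.initialize() continues to work, while new code can simply construct the client and start making requests.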