mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-18 19:49:47 +00:00
fix(test): update LlamaStackAsLibraryClient initialization tests after removing initialize method
The recent refactor (3778a4c3) introduced automatic initialization for LlamaStackAsLibraryClient but the unit tests were expecting manual initialization and _is_initialized. This caused test failures. Changes: - Update test assertions to check route_impls is not None instead of _is_initialized - Add proper mocking in tests to avoid external provider dependencies - Maintain test coverage for automatic initialization behavior - Ensure backward compatibility testing for deprecated initialize() method Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
This commit is contained in:
parent
c54278f3d7
commit
2ebdfced3c
4 changed files with 78 additions and 27 deletions
|
|
@ -256,7 +256,6 @@ def instantiate_llama_stack_client(session):
|
|||
provider_data=get_provider_data(),
|
||||
skip_logger_removal=True,
|
||||
)
|
||||
# Client is automatically initialized during construction
|
||||
return client
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -113,7 +113,6 @@ def openai_client(base_url, api_key, provider):
|
|||
raise ValueError(f"Invalid config for Llama Stack: {provider}, it must be of the form 'stack:<config>'")
|
||||
config = parts[1]
|
||||
client = LlamaStackAsLibraryClient(config, skip_logger_removal=True)
|
||||
# Client is automatically initialized during construction
|
||||
return client
|
||||
|
||||
return OpenAI(
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue