refactor(client): remove initialize() method from LlamaStackAsLibrary

Currently, client.initialize() has to be invoked explicitly by the user.
To improve the developer experience and avoid runtime errors, this PR initializes LlamaStackAsLibrary implicitly when the client is used.
It also prevents multiple initializations of the same client, while maintaining backward compatibility.

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
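
A minimal sketch of the pattern this change relies on, assuming the attribute names _is_initialized and route_impls that the tests below assert on; the class names, the placeholder routing table, and the asyncio.run() wiring are illustrative guesses, not the project's actual implementation:

import asyncio


class AsyncClientSketch:
    """Illustrative stand-in for AsyncLlamaStackAsLibraryClient."""

    def __init__(self) -> None:
        self._is_initialized = False
        self.route_impls = None

    async def initialize(self) -> bool:
        # Idempotent: repeated calls are safe and always report success.
        if not self._is_initialized:
            self.route_impls = {}  # placeholder for the real routing table
            self._is_initialized = True
        return True


class SyncClientSketch:
    """Illustrative stand-in for LlamaStackAsLibraryClient."""

    def __init__(self) -> None:
        self.async_client = AsyncClientSketch()
        # Initialize eagerly so the client is ready right after construction.
        asyncio.run(self.async_client.initialize())

    def initialize(self) -> None:
        # Kept as a no-op for backward compatibility; historically returned None.
        return None
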
Author: Mustafa Elbehery
Date:   2025-07-31 14:18:45 +02:00
Parent: ac25e35124
Commit: c54278f3d7
5 changed files with 76 additions and 87 deletions


@@ -5,86 +5,70 @@
 # the root directory of this source tree.

 """
-Unit tests for LlamaStackAsLibraryClient initialization error handling.
+Unit tests for LlamaStackAsLibraryClient automatic initialization.

-These tests ensure that users get proper error messages when they forget to call
-initialize() on the library client, preventing AttributeError regressions.
+These tests ensure that the library client is automatically initialized
+and ready to use immediately after construction.
 """

 import pytest

 from llama_stack.core.library_client import (
     AsyncLlamaStackAsLibraryClient,
     LlamaStackAsLibraryClient,
 )


-class TestLlamaStackAsLibraryClientInitialization:
-    """Test proper error handling for uninitialized library clients."""
+class TestLlamaStackAsLibraryClientAutoInitialization:
+    """Test automatic initialization of library clients."""

-    @pytest.mark.parametrize(
-        "api_call",
-        [
-            lambda client: client.models.list(),
-            lambda client: client.chat.completions.create(model="test", messages=[{"role": "user", "content": "test"}]),
-            lambda client: next(
-                client.chat.completions.create(
-                    model="test", messages=[{"role": "user", "content": "test"}], stream=True
-                )
-            ),
-        ],
-        ids=["models.list", "chat.completions.create", "chat.completions.create_stream"],
-    )
-    def test_sync_client_proper_error_without_initialization(self, api_call):
-        """Test that sync client raises ValueError with helpful message when not initialized."""
+    def test_sync_client_auto_initialization(self):
+        """Test that sync client is automatically initialized after construction."""
         client = LlamaStackAsLibraryClient("nvidia")

-        with pytest.raises(ValueError) as exc_info:
-            api_call(client)
+        # Client should be automatically initialized
+        assert client.async_client._is_initialized is True
+        assert client.async_client.route_impls is not None

-        error_msg = str(exc_info.value)
-        assert "Client not initialized" in error_msg
-        assert "Please call initialize() first" in error_msg
-
-    @pytest.mark.parametrize(
-        "api_call",
-        [
-            lambda client: client.models.list(),
-            lambda client: client.chat.completions.create(model="test", messages=[{"role": "user", "content": "test"}]),
-        ],
-        ids=["models.list", "chat.completions.create"],
-    )
-    async def test_async_client_proper_error_without_initialization(self, api_call):
-        """Test that async client raises ValueError with helpful message when not initialized."""
+    async def test_async_client_auto_initialization(self):
+        """Test that async client can be initialized and works properly."""
         client = AsyncLlamaStackAsLibraryClient("nvidia")

-        with pytest.raises(ValueError) as exc_info:
-            await api_call(client)
+        # Initialize the client
+        result = await client.initialize()
+        assert result is True
+        assert client._is_initialized is True
+        assert client.route_impls is not None

-        error_msg = str(exc_info.value)
-        assert "Client not initialized" in error_msg
-        assert "Please call initialize() first" in error_msg
+    def test_initialize_method_backward_compatibility(self):
+        """Test that initialize() method still works for backward compatibility."""
+        client = LlamaStackAsLibraryClient("nvidia")

-    async def test_async_client_streaming_error_without_initialization(self):
-        """Test that async client streaming raises ValueError with helpful message when not initialized."""
+        # initialize() should return None (historical behavior) and not cause errors
+        result = client.initialize()
+        assert result is None
+
+        # Multiple calls should be safe
+        result2 = client.initialize()
+        assert result2 is None
+
+    async def test_async_initialize_method_idempotent(self):
+        """Test that async initialize() method can be called multiple times safely."""
         client = AsyncLlamaStackAsLibraryClient("nvidia")

-        with pytest.raises(ValueError) as exc_info:
-            stream = await client.chat.completions.create(
-                model="test", messages=[{"role": "user", "content": "test"}], stream=True
-            )
-            await anext(stream)
+        # First initialization
+        result1 = await client.initialize()
+        assert result1 is True
+        assert client._is_initialized is True

-        error_msg = str(exc_info.value)
-        assert "Client not initialized" in error_msg
-        assert "Please call initialize() first" in error_msg
+        # Second initialization should be safe and return True
+        result2 = await client.initialize()
+        assert result2 is True
+        assert client._is_initialized is True

-    def test_route_impls_initialized_to_none(self):
-        """Test that route_impls is initialized to None to prevent AttributeError."""
-        # Test sync client
+    def test_route_impls_automatically_set(self):
+        """Test that route_impls is automatically set during construction."""
+        # Test sync client - should be auto-initialized
         sync_client = LlamaStackAsLibraryClient("nvidia")
-        assert sync_client.async_client.route_impls is None
+        assert sync_client.async_client.route_impls is not None

-        # Test async client directly
-        async_client = AsyncLlamaStackAsLibraryClient("nvidia")
-        assert async_client.route_impls is None
+        # Test that the async client is marked as initialized
+        assert sync_client.async_client._is_initialized is True
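
Hypothetical usage under this change, kept to calls that appear in the tests above (the "nvidia" template and models.list()); whether a given distribution actually resolves is environment-dependent:

from llama_stack.core.library_client import LlamaStackAsLibraryClient

# Before this change, API calls on an uninitialized client raised
# "Client not initialized. Please call initialize() first."
# Now construction alone leaves the client ready to use.
client = LlamaStackAsLibraryClient("nvidia")
models = client.models.list()

# Calling initialize() is still allowed; it is a no-op that returns None.
client.initialize()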