mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-17 22:49:47 +00:00
refactor(client): remove initialize() method from LlamaStackAsLibrary
Currently, client.initialize() has to be invoked by the user. To improve the developer experience and to avoid runtime errors, this PR initializes LlamaStackAsLibrary implicitly upon first use of the client. It also prevents multiple initializations of the same client, while maintaining backward compatibility. Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
This commit is contained in:
parent
ac25e35124
commit
c54278f3d7
5 changed files with 76 additions and 87 deletions
|
|
@ -256,9 +256,7 @@ def instantiate_llama_stack_client(session):
|
|||
provider_data=get_provider_data(),
|
||||
skip_logger_removal=True,
|
||||
)
|
||||
if not client.initialize():
|
||||
raise RuntimeError("Initialization failed")
|
||||
|
||||
# Client is automatically initialized during construction
|
||||
return client
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -113,8 +113,7 @@ def openai_client(base_url, api_key, provider):
|
|||
raise ValueError(f"Invalid config for Llama Stack: {provider}, it must be of the form 'stack:<config>'")
|
||||
config = parts[1]
|
||||
client = LlamaStackAsLibraryClient(config, skip_logger_removal=True)
|
||||
if not client.initialize():
|
||||
raise RuntimeError("Initialization failed")
|
||||
# Client is automatically initialized during construction
|
||||
return client
|
||||
|
||||
return OpenAI(
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue