forked from phoenix-oss/llama-stack-mirror
If client.initialize() fails, the example should exit (#954)
# What does this PR do? The example script can gracefully exit if the boolean returned from initialize() is used properly. Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
parent
981bb52b59
commit
26aef50bc5
2 changed files with 5 additions and 2 deletions
|
@ -98,6 +98,7 @@ llama-stack-client \
|
|||
Here is a simple example to perform chat completions using the SDK.
|
||||
```python
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def create_http_client():
|
||||
|
@ -112,7 +113,9 @@ def create_library_client(template="ollama"):
|
|||
from llama_stack import LlamaStackAsLibraryClient
|
||||
|
||||
client = LlamaStackAsLibraryClient(template)
|
||||
client.initialize()
|
||||
if not client.initialize():
|
||||
print("llama stack not built properly")
|
||||
sys.exit(1)
|
||||
return client
|
||||
|
||||
|
||||
|
|
|
@ -196,7 +196,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
|
|||
self.custom_provider_registry = custom_provider_registry
|
||||
self.provider_data = provider_data
|
||||
|
||||
async def initialize(self):
|
||||
async def initialize(self) -> bool:
|
||||
try:
|
||||
self.impls = await construct_stack(self.config, self.custom_provider_registry)
|
||||
except ModuleNotFoundError as _e:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue