fix stack trace when stack is not built.
Currently, this is the output when you run a distribution locally without first running `llama stack build`:
```
Traceback (most recent call last):
File "/Users/charliedoern/Documents/llama-sdk.py", line 25, in <module>
models = client.models.list()
^^^^^^^^^^^^^^^^^^^^
File "/Users/charliedoern/Documents/llama-stack-client-python/src/llama_stack_client/resources/models.py", line 107, in list
raise exc
File "/Users/charliedoern/Documents/llama-stack-client-python/src/llama_stack_client/resources/models.py", line 95, in list
return self._get(
^^^^^^^^^^
File "/Users/charliedoern/Documents/llama-stack-client-python/src/llama_stack_client/_base_client.py", line 1212, in get
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/charliedoern/Documents/llama-stack/llama_stack/distribution/library_client.py", line 168, in request
return asyncio.run(self.async_client.request(*args, **kwargs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/python@3.11/3.11.10/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/base_events.py", line 654, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "/Users/charliedoern/Documents/llama-stack/llama_stack/distribution/library_client.py", line 258, in request
if not self.endpoint_impls:
^^^^^^^^^^^^^^^^^^^
AttributeError: 'AsyncLlamaStackAsLibraryClient' object has no attribute 'endpoint_impls'
```
The intended exception is never raised because the attribute does not exist at all. This change initializes `endpoint_impls` properly, so it is `None` until the stack is built and the intended `ValueError` can be raised instead of the `AttributeError` above.
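To illustrate the mechanics, here is a minimal, self-contained sketch (hypothetical names, not the actual library code) of the failure mode and the fix:

```python
import asyncio


class Client:
    """Hypothetical stand-in for AsyncLlamaStackAsLibraryClient."""

    async def initialize(self, built: bool) -> bool:
        try:
            # The fix: assign the attribute before anything that can fail,
            # so it always exists even if construction raises.
            self.endpoint_impls = None
            if not built:
                raise ModuleNotFoundError("stack is not built")
            self.endpoint_impls = {"/v1/models": object()}
        except ModuleNotFoundError as e:
            print(e)
            return False
        return True

    async def request(self) -> None:
        # With the attribute always assigned, this guard raises the intended
        # ValueError; before the fix, the attribute lookup itself raised
        # AttributeError whenever initialize() had failed early.
        if not self.endpoint_impls:
            raise ValueError("Stack not initialized; run `llama stack build` first")


async def main() -> None:
    client = Client()
    await client.initialize(built=False)
    try:
        await client.request()
    except ValueError as e:
        print(f"intended error: {e}")


asyncio.run(main())
```

Because the assignment happens before anything that can raise, the attribute exists even when initialization fails partway through, and `request()` can fall through to the intended error path.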
Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
parent a79a083e39
commit ff3cb032a6
2 changed files with 8 additions and 1 deletion
```diff
@@ -134,7 +134,13 @@ client = (
 ) # or create_http_client() depending on the environment you picked

 # List available models
-models = client.models.list()
+
+try:
+    models = client.models.list()
+except ValueError as e:
+    print(e)
+    sys.exit(1)
+
 print("--- Available models: ---")
 for m in models:
     print(f"- {m.identifier}")
```
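Note that the updated example calls `sys.exit(1)`, so it assumes `import sys` earlier in the snippet. Put together, the updated docs example looks roughly like this (the client construction line is assumed from the surrounding docs, with the default port, and is not part of this diff):

```python
import sys

from llama_stack_client import LlamaStackClient

# Assumed stand-in for create_http_client()/create_library_client() from the
# docs; 8321 is the default llama-stack port, adjust for your distribution.
client = LlamaStackClient(base_url="http://localhost:8321")

# List available models
try:
    models = client.models.list()
except ValueError as e:
    print(e)
    sys.exit(1)

print("--- Available models: ---")
for m in models:
    print(f"- {m.identifier}")
```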
```diff
@@ -198,6 +198,7 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):

     async def initialize(self) -> bool:
         try:
+            self.endpoint_impls = None
             self.impls = await construct_stack(self.config, self.custom_provider_registry)
         except ModuleNotFoundError as _e:
             cprint(_e.msg, "red")
```