Small fix to library client

Ashwin Bharambe 2024-12-16 14:08:30 -08:00
parent 5e08812bcb
commit eb37fba9da
2 changed files with 3 additions and 1 deletion


@@ -102,7 +102,7 @@ Make sure you have done `pip install llama-stack` and have the Llama Stack CLI a
 export LLAMA_STACK_PORT=5001
 llama stack build --template ollama --image-type conda
-llama stack run ./distributions/ollama/run.yaml \
+llama stack run ./run.yaml \
   --port $LLAMA_STACK_PORT \
   --env INFERENCE_MODEL=$INFERENCE_MODEL \
   --env OLLAMA_URL=http://localhost:11434
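
For reference, a minimal sketch of talking to the server started above from Python, assuming the `llama-stack-client` package is installed (`pip install llama-stack-client`); the model-listing call is illustrative and may differ slightly across client versions:

```python
import os

from llama_stack_client import LlamaStackClient

# Point the client at the server started by `llama stack run` above.
port = os.environ.get("LLAMA_STACK_PORT", "5001")
client = LlamaStackClient(base_url=f"http://localhost:{port}")

# Sanity check: list the models the running distribution serves.
for model in client.models.list():
    print(model.identifier)
```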


@@ -257,6 +257,8 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
         endpoints = get_all_api_endpoints()
         endpoint_impls = {}
         for api, api_endpoints in endpoints.items():
+            if api not in self.impls:
+                continue
             for endpoint in api_endpoints:
                 impl = self.impls[api]
                 func = getattr(impl, endpoint.name)
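
The added guard is needed because, as the surrounding code suggests, `get_all_api_endpoints()` can return APIs for which no provider is configured, in which case the later `self.impls[api]` lookup would raise `KeyError`. A standalone sketch of the failure mode, using illustrative stand-in data rather than the real llama-stack modules:

```python
# Hypothetical stand-ins for get_all_api_endpoints() and self.impls.
endpoints = {
    "inference": ["chat_completion"],
    "safety": ["run_shield"],  # a known API with no configured provider
}
impls = {"inference": object()}  # only inference is configured in run.yaml

endpoint_impls = {}
for api, api_endpoints in endpoints.items():
    if api not in impls:
        continue  # without this guard, impls[api] raises KeyError for "safety"
    for endpoint in api_endpoints:
        endpoint_impls[(api, endpoint)] = impls[api]

print(list(endpoint_impls))  # only ('inference', 'chat_completion') survives
```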