diff --git a/llama_stack/distribution/build_conda_env.sh b/llama_stack/distribution/build_conda_env.sh
index ff9c26e5e..31b3e1b21 100755
--- a/llama_stack/distribution/build_conda_env.sh
+++ b/llama_stack/distribution/build_conda_env.sh
@@ -125,7 +125,7 @@ ensure_conda_env_python310() {
       fi
 
       printf "Installing from LLAMA_MODELS_DIR: $LLAMA_MODELS_DIR\n"
-      uv pip uninstall -y llama-models
+      uv pip uninstall llama-models
       uv pip install --no-cache-dir -e "$LLAMA_MODELS_DIR"
     fi
 
diff --git a/llama_stack/distribution/build_venv.sh b/llama_stack/distribution/build_venv.sh
index 3166c07f6..3cb290bb7 100755
--- a/llama_stack/distribution/build_venv.sh
+++ b/llama_stack/distribution/build_venv.sh
@@ -89,7 +89,7 @@ run() {
     fi
 
     printf "Installing from LLAMA_MODELS_DIR: $LLAMA_MODELS_DIR\n"
-    uv pip uninstall -y llama-models
+    uv pip uninstall llama-models
     uv pip install --no-cache-dir -e "$LLAMA_MODELS_DIR"
   fi
 
diff --git a/llama_stack/providers/utils/memory/vector_store.py b/llama_stack/providers/utils/memory/vector_store.py
index d35f3e516..310db18b0 100644
--- a/llama_stack/providers/utils/memory/vector_store.py
+++ b/llama_stack/providers/utils/memory/vector_store.py
@@ -17,9 +17,6 @@
 import httpx
 import numpy as np
 from llama_models.llama3.api.tokenizer import Tokenizer
-from numpy.typing import NDArray
-
-from pypdf import PdfReader
 
 from llama_stack.apis.common.content_types import (
     InterleavedContent,
@@ -33,6 +30,9 @@
 from llama_stack.providers.datatypes import Api
 from llama_stack.providers.utils.inference.prompt_adapter import (
     interleaved_content_as_str,
 )
+from numpy.typing import NDArray
+
+from pypdf import PdfReader
 
 log = logging.getLogger(__name__)