diff --git a/pyproject.toml b/pyproject.toml
index 3c49e7896..4ec83249c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,6 +51,11 @@ dependencies = [
     "sqlalchemy[asyncio]>=2.0.41",  # server - for conversations
 ]
 
+[project.optional-dependencies]
+client = [
+    "llama-stack-client>=0.3.0",  # Optional for library-only usage
+]
+
 [dependency-groups]
 dev = [
     "pytest>=8.4",
diff --git a/src/llama_stack/core/library_client.py b/src/llama_stack/core/library_client.py
index 42d5d279c..b8f9f715f 100644
--- a/src/llama_stack/core/library_client.py
+++ b/src/llama_stack/core/library_client.py
@@ -30,7 +30,7 @@ try:
     )
 except ImportError as e:
     raise ImportError(
-        "llama-stack-client is not installed. Please install it with `pip install llama-stack-client`."
+        "llama-stack-client is not installed. Please install it with `uv pip install llama-stack[client]`."
     ) from e
 
 from pydantic import BaseModel, TypeAdapter
diff --git a/uv.lock b/uv.lock
index 07a8f8f62..ba9a862a3 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1963,6 +1963,11 @@ dependencies = [
     { name = "uvicorn" },
 ]
 
+[package.optional-dependencies]
+client = [
+    { name = "llama-stack-client" },
+]
+
 [package.dev-dependencies]
 benchmark = [
     { name = "locust" },
@@ -2089,7 +2094,7 @@ requires-dist = [
     { name = "httpx" },
     { name = "jinja2", specifier = ">=3.1.6" },
     { name = "jsonschema" },
-    { name = "llama-stack-client", specifier = ">=0.3.0" },
+    { name = "llama-stack-client", marker = "extra == 'client'", specifier = ">=0.3.0" },
     { name = "openai", specifier = ">=2.5.0" },
     { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" },
     { name = "opentelemetry-sdk", specifier = ">=1.30.0" },
@@ -2107,6 +2112,7 @@ requires-dist = [
     { name = "tiktoken" },
     { name = "uvicorn", specifier = ">=0.34.0" },
 ]
+provides-extras = ["client"]
 
 [package.metadata.requires-dev]
 benchmark = [{ name = "locust", specifier = ">=2.39.1" }]
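
With `llama-stack-client` moved behind the new `client` extra, a consumer only gets the in-process library client after installing that extra. A minimal consumer-side sketch of the effect of this change (the imported class name is assumed for illustration; the diff only shows the import guard's error path):

```python
# Sketch, not part of the diff: what a library-only user sees after this change.
# Install the extra first:  uv pip install "llama-stack[client]"
try:
    # Importing the module triggers the guarded llama-stack-client import;
    # without the "client" extra it re-raises ImportError with the updated message.
    from llama_stack.core.library_client import LlamaStackAsLibraryClient  # name assumed
except ImportError as err:
    # e.g. "llama-stack-client is not installed. Please install it with
    #       `uv pip install llama-stack[client]`."
    print(err)
```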