From 715d4f8d8c37be8f2988c08a4da5add48a5cc144 Mon Sep 17 00:00:00 2001
From: Eric Huang
Date: Mon, 3 Nov 2025 16:16:02 -0800
Subject: [PATCH] test

# What does this PR do?


## Test Plan

---
 pyproject.toml                       | 19 +++----
 .../distributions/ci-tests/run.yaml  |  7 +++
 tests/integration/conftest.py        |  1 +
 uv.lock                              | 51 ++++++++++++++++---
 4 files changed, 62 insertions(+), 16 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 8f07f9cbd..bf8c5bb85 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,8 +25,8 @@ classifiers = [
 ]
 dependencies = [
     "aiohttp",
-    "fastapi>=0.115.0,<1.0",  # server
-    "fire",  # for MCP in LLS client
+    "fastapi>=0.115.0,<1.0",                           # server
+    "fire",                                            # for MCP in LLS client
     "httpx",
     "jinja2>=3.1.6",
     "jsonschema",
@@ -34,7 +34,7 @@ dependencies = [
     "openai>=2.5.0",
     "prompt-toolkit",
     "python-dotenv",
-    "pyjwt[crypto]>=2.10.0",  # Pull crypto to support RS256 for jwt. Requires 2.10.0+ for ssl_context support.
+    "pyjwt[crypto]>=2.10.0",                           # Pull crypto to support RS256 for jwt. Requires 2.10.0+ for ssl_context support.
     "pydantic>=2.11.9",
     "rich",
     "starlette",
@@ -42,13 +42,13 @@ dependencies = [
     "tiktoken",
     "pillow",
     "h11>=0.16.0",
-    "python-multipart>=0.0.20",  # For fastapi Form
-    "uvicorn>=0.34.0",  # server
-    "opentelemetry-sdk>=1.30.0",  # server
+    "python-multipart>=0.0.20",                        # For fastapi Form
+    "uvicorn>=0.34.0",                                 # server
+    "opentelemetry-sdk>=1.30.0",                       # server
     "opentelemetry-exporter-otlp-proto-http>=1.30.0",  # server
-    "aiosqlite>=0.21.0",  # server - for metadata store
-    "asyncpg",  # for metadata store
-    "sqlalchemy[asyncio]>=2.0.41",  # server - for conversations
+    "aiosqlite>=0.21.0",                               # server - for metadata store
+    "asyncpg",                                         # for metadata store
+    "sqlalchemy[asyncio]>=2.0.41",                     # server - for conversations
 ]

 [project.optional-dependencies]
@@ -192,6 +192,7 @@ explicit = true
 [tool.uv.sources]
 torch = [{ index = "pytorch-cpu" }]
 torchvision = [{ index = "pytorch-cpu" }]
+llama-stack-client = { path = "../llama-stack-client-python", editable = true }

 [tool.ruff]
 line-length = 120
diff --git a/src/llama_stack/distributions/ci-tests/run.yaml b/src/llama_stack/distributions/ci-tests/run.yaml
index 702acff8e..63c509d29 100644
--- a/src/llama_stack/distributions/ci-tests/run.yaml
+++ b/src/llama_stack/distributions/ci-tests/run.yaml
@@ -143,6 +143,13 @@ providers:
       persistence:
         namespace: vector_io::weaviate
         backend: kv_default
+  - provider_id: openai-vector-store
+    provider_type: remote::openai
+    config:
+      api_key: ${env.OPENAI_API_KEY:=}
+      persistence:
+        namespace: vector_io::openai_vector_store
+        backend: kv_default
   files:
   - provider_id: meta-reference-files
     provider_type: inline::localfs
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index e5ae72fc1..6b5f7bd2d 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -371,6 +371,7 @@ def vector_provider_wrapper(func):
     # For CI tests (replay/record), only use providers that are available in ci-tests environment
     if os.environ.get("LLAMA_STACK_TEST_INFERENCE_MODE") in ("replay", "record"):
         all_providers = ["faiss", "sqlite-vec"]
+        all_providers = ["openai-vector-store"]
     else:
         # For live tests, try all providers (they'll skip if not available)
         all_providers = [
diff --git a/uv.lock b/uv.lock
index de1c8879c..f8991a121 100644
--- a/uv.lock
+++ b/uv.lock
@@ -2096,8 +2096,8 @@ requires-dist = [
     { name = "httpx" },
     { name = "jinja2", specifier = ">=3.1.6" },
     { name = "jsonschema" },
-    { name = "llama-stack-client", specifier = ">=0.3.0" },
-    { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.3.0" },
+    { name = "llama-stack-client", editable = "../llama-stack-client-python" },
+    { name = "llama-stack-client", marker = "extra == 'ui'", editable = "../llama-stack-client-python" },
     { name = "openai", specifier = ">=2.5.0" },
     { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" },
     { name = "opentelemetry-sdk", specifier = ">=1.30.0" },
@@ -2232,8 +2232,8 @@ unit = [

 [[package]]
 name = "llama-stack-client"
-version = "0.3.0"
-source = { registry = "https://pypi.org/simple" }
+version = "0.4.0a1"
+source = { editable = "../llama-stack-client-python" }
 dependencies = [
     { name = "anyio" },
     { name = "click" },
@@ -2251,10 +2251,47 @@ dependencies = [
     { name = "tqdm" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/1d/d9/3c720f420fc80ce51de1a0ad90c53edc613617b68980137dcf716a86198a/llama_stack_client-0.3.0.tar.gz", hash = "sha256:1e974a74d0da285e18ba7df30b9a324e250782b130253bcef3e695830c5bb03d", size = 340443, upload-time = "2025-10-21T23:58:25.855Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/96/27/1c65035ce58100be22409c98e4d65b1cdaeff7811ea968f9f844641330d7/llama_stack_client-0.3.0-py3-none-any.whl", hash = "sha256:9f85d84d508ef7da44b96ca8555d7783da717cfc9135bab6a5530fe8c852690d", size = 425234, upload-time = "2025-10-21T23:58:24.246Z" },
+
+[package.metadata]
+requires-dist = [
+    { name = "aiohttp", marker = "extra == 'aiohttp'" },
+    { name = "anyio", specifier = ">=3.5.0,<5" },
+    { name = "click" },
+    { name = "distro", specifier = ">=1.7.0,<2" },
+    { name = "fire" },
+    { name = "httpx", specifier = ">=0.23.0,<1" },
+    { name = "httpx-aiohttp", marker = "extra == 'aiohttp'", specifier = ">=0.1.9" },
+    { name = "pandas" },
+    { name = "prompt-toolkit" },
+    { name = "pyaml" },
+    { name = "pydantic", specifier = ">=1.9.0,<3" },
+    { name = "requests" },
+    { name = "rich" },
+    { name = "sniffio" },
+    { name = "termcolor" },
+    { name = "tqdm" },
+    { name = "typing-extensions", specifier = ">=4.7,<5" },
 ]
+provides-extras = ["aiohttp"]
+
+[package.metadata.requires-dev]
+dev = [
+    { name = "black" },
+    { name = "dirty-equals", specifier = ">=0.6.0" },
+    { name = "importlib-metadata", specifier = ">=6.7.0" },
+    { name = "mypy" },
+    { name = "pre-commit" },
+    { name = "pyright", specifier = "==1.1.399" },
+    { name = "pytest", specifier = ">=7.1.1" },
+    { name = "pytest-asyncio" },
+    { name = "pytest-xdist", specifier = ">=3.6.1" },
+    { name = "respx" },
+    { name = "rich", specifier = ">=13.7.1" },
+    { name = "ruff" },
+    { name = "time-machine" },
+]
+pydantic-v1 = [{ name = "pydantic", specifier = ">=1.9.0,<2" }]
+pydantic-v2 = [{ name = "pydantic", specifier = ">=2,<3" }]

 [[package]]
 name = "lm-format-enforcer"