chore: Stack server no longer depends on llama-stack-client

This commit is contained in:
Ashwin Bharambe 2025-11-06 11:49:37 -08:00
parent 9df073450f
commit 2221cc2cc4
12 changed files with 24 additions and 20 deletions

2
uv.lock generated
View file

@@ -1945,7 +1945,6 @@ dependencies = [
{ name = "httpx" },
{ name = "jinja2" },
{ name = "jsonschema" },
-    { name = "llama-stack-client" },
{ name = "openai" },
{ name = "opentelemetry-exporter-otlp-proto-http" },
{ name = "opentelemetry-sdk" },
@@ -2096,7 +2095,6 @@ requires-dist = [
{ name = "httpx" },
{ name = "jinja2", specifier = ">=3.1.6" },
{ name = "jsonschema" },
-    { name = "llama-stack-client", specifier = ">=0.3.0" },
{ name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.3.0" },
{ name = "openai", specifier = ">=2.5.0" },
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" },