Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-16 02:32:36 +00:00)
feat: Add responses and safety impl with extra body

parent 6954fe2274
commit 9152efa1a9

18 changed files with 833 additions and 164 deletions
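As a rough sketch of what the commit title describes: the OpenAI-compatible Responses and Safety endpoints can now pick up provider-specific fields sent through the client's extra_body. The server URL, model id, shield name, and the exact extra_body keys below are illustrative assumptions, not taken from this commit.

    # Hypothetical usage sketch against a running Llama Stack server.
    # Model, shield, and extra_body key names are placeholders.
    from openai import OpenAI

    client = OpenAI(base_url="http://localhost:8321/v1", api_key="none")

    # Responses: extra_body carries fields the OpenAI spec does not define,
    # so a server-side impl can read them from the request body.
    response = client.responses.create(
        model="llama3.2:3b",
        input="Summarize the plot of Hamlet in two sentences.",
        extra_body={"guardrails": ["llama-guard"]},  # assumed key name
    )
    print(response.output_text)

    # Safety: the moderations endpoint can be exercised the same way.
    moderation = client.moderations.create(
        model="llama-guard",
        input="Is this text safe?",
        extra_body={"shield_params": {"threshold": 0.8}},  # assumed key name
    )
    print(moderation.results[0].flagged)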
pyproject.toml

@@ -25,13 +25,13 @@ classifiers = [
]
dependencies = [
    "aiohttp",
    "fastapi>=0.115.0,<1.0", # server
    "fire", # for MCP in LLS client
    "httpx",
    "jinja2>=3.1.6",
    "jsonschema",
    "llama-stack-client>=0.2.23",
    "openai>=1.107", # for expires_after support
    "prompt-toolkit",
    "python-dotenv",
    "python-jose[cryptography]",
@@ -42,13 +42,13 @@ dependencies = [
    "tiktoken",
    "pillow",
    "h11>=0.16.0",
    "python-multipart>=0.0.20", # For fastapi Form
    "uvicorn>=0.34.0", # server
    "opentelemetry-sdk>=1.30.0", # server
    "opentelemetry-exporter-otlp-proto-http>=1.30.0", # server
    "aiosqlite>=0.21.0", # server - for metadata store
    "asyncpg", # for metadata store
    "sqlalchemy[asyncio]>=2.0.41", # server - for conversations
]

[project.optional-dependencies]
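One dependency note: the openai floor above is pinned at >=1.107 "for expires_after support", i.e. letting uploaded files expire automatically. A minimal illustration of that parameter (file name, purpose, and TTL are placeholders, not from this repo):

    # Illustration of the "expires_after" feature the openai>=1.107 pin refers to.
    from openai import OpenAI

    client = OpenAI()

    uploaded = client.files.create(
        file=open("dataset.jsonl", "rb"),
        purpose="batch",
        # Expire the file 24 hours after creation instead of keeping it forever.
        expires_after={"anchor": "created_at", "seconds": 86400},
    )
    print(uploaded.id, uploaded.expires_at)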
@@ -161,6 +161,7 @@ explicit = true
[tool.uv.sources]
torch = [{ index = "pytorch-cpu" }]
torchvision = [{ index = "pytorch-cpu" }]
llama-stack-client = { path = "../llama-stack-client-python" }

[tool.ruff]
line-length = 120