# What does this PR do?

Dropped Python 3.10 support, updated pyproject and dependencies, and removed some blocks of code with special handling for enum.StrEnum.

Closes #2458

Signed-off-by: Charlie Doern <cdoern@redhat.com>
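For context, enum.StrEnum only exists on Python 3.11+, so supporting 3.10 forces a compatibility shim around it. Below is a minimal sketch of the kind of shim that becomes unnecessary once requires-python moves to ">=3.11"; the exact code removed by this PR may differ, and `ProviderStatus` is a hypothetical enum used only for illustration.

```python
import sys
from enum import Enum

# Compatibility shim of the kind this PR removes: enum.StrEnum is only
# available on Python 3.11+, so code that still supported 3.10 had to fall
# back to a hand-rolled (str, Enum) subclass.
if sys.version_info >= (3, 11):
    from enum import StrEnum
else:
    class StrEnum(str, Enum):
        """Fallback for Python 3.10, where enum.StrEnum is unavailable."""

        def __str__(self) -> str:
            return str(self.value)


# Once 3.10 support is dropped, the block above collapses to a plain
#   from enum import StrEnum
class ProviderStatus(StrEnum):  # hypothetical enum, for illustration only
    HEALTHY = "healthy"
    UNHEALTHY = "unhealthy"
```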
43 lines · 780 B · TOML
```toml
[project]
dependencies = [
  "llama-stack",
  "pydantic",
  "ollama",
  "aiohttp",
  "aiosqlite",
  "autoevals",
  "chardet",
  "chromadb-client",
  "datasets",
  "faiss-cpu",
  "fastapi",
  "fire",
  "httpx",
  "matplotlib",
  "mcp",
  "nltk",
  "numpy",
  "openai",
  "opentelemetry-exporter-otlp-proto-http",
  "opentelemetry-sdk",
  "pandas",
  "pillow",
  "psycopg2-binary",
  "pymongo",
  "pypdf",
  "redis",
  "requests",
  "scikit-learn",
  "scipy",
  "sentencepiece",
  "tqdm",
  "transformers",
  "tree_sitter",
  "uvicorn",
]

name = "llama-stack-provider-ollama"
version = "0.1.0"
description = "External provider for Ollama using the Llama Stack API"
readme = "README.md"
requires-python = ">=3.11"
```