mirror of https://github.com/meta-llama/llama-stack.git

Commit 7034637cac: Merged from main + fixed elasticsearch_url
594 changed files with 79447 additions and 35172 deletions
pyproject.toml:

@@ -31,13 +31,13 @@ dependencies = [
     "httpx",
     "jinja2>=3.1.6",
     "jsonschema",
+    "llama-stack-api", # API and provider specifications (local dev via tool.uv.sources)
     "openai>=2.5.0",
     "prompt-toolkit",
     "python-dotenv",
     "pyjwt[crypto]>=2.10.0", # Pull crypto to support RS256 for jwt. Requires 2.10.0+ for ssl_context support.
     "pydantic>=2.11.9",
     "rich",
-    "starlette",
     "termcolor",
     "tiktoken",
     "pillow",
@@ -49,11 +49,12 @@ dependencies = [
     "aiosqlite>=0.21.0", # server - for metadata store
     "asyncpg", # for metadata store
     "sqlalchemy[asyncio]>=2.0.41", # server - for conversations
+    "starlette>=0.49.1",
 ]
 
 [project.optional-dependencies]
 client = [
-    "llama-stack-client>=0.3.0", # Optional for library-only usage
+    "llama-stack-client>=0.3.0", # Optional for library-only usage
 ]
 
 [dependency-groups]
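The client extra above keeps the llama-stack-client SDK out of the default install. As a minimal sketch (hypothetical "myproject" names, not from this diff), an extra declared under [project.optional-dependencies] is only pulled in when requested, e.g. via pip install "myproject[client]" or uv sync --extra client:

    [project]
    name = "myproject"
    version = "0.1.0"
    dependencies = ["httpx"]        # always installed

    [project.optional-dependencies]
    client = ["llama-stack-client>=0.3.0"]  # installed only when the extra is requested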
@@ -64,13 +65,14 @@ dev = [
     "pytest-cov",
     "pytest-html",
     "pytest-json-report",
-    "pytest-socket", # For blocking network access in unit tests
-    "nbval", # For notebook testing
+    "pytest-socket", # For blocking network access in unit tests
+    "nbval", # For notebook testing
     "black",
     "ruff",
     "mypy",
-    "pre-commit",
-    "ruamel.yaml", # needed for openapi generator
+    "pre-commit>=4.4.0",
+    "ruamel.yaml", # needed for openapi generator
+    "openapi-spec-validator>=0.7.2",
 ]
 # Type checking dependencies - includes type stubs and optional runtime dependencies
 # needed for complete mypy coverage across all optional features
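Unlike extras, the [dependency-groups] table (PEP 735) holds development-only dependencies that never appear in the published package metadata. A minimal sketch with hypothetical group contents, assuming uv as the installer:

    [dependency-groups]
    dev = ["pytest", "ruff"]   # uv sync installs the "dev" group by default
    docs = ["sphinx"]          # other groups are opt-in: uv sync --group docs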
@@ -112,7 +114,7 @@ unit = [
     "aiosqlite",
     "aiohttp",
     "psycopg2-binary>=2.9.0",
-    "pypdf",
+    "pypdf>=6.1.3",
     "mcp",
     "chardet",
     "sqlalchemy",
@@ -135,7 +137,7 @@ test = [
     "torchvision>=0.21.0",
     "chardet",
     "psycopg2-binary>=2.9.0",
-    "pypdf",
+    "pypdf>=6.1.3",
     "mcp",
     "datasets>=4.0.0",
     "autoevals",
@@ -181,7 +183,12 @@ install-wheel-from-presigned = "llama_stack.cli.scripts.run:install_wheel_from_p
 
 [tool.setuptools.packages.find]
 where = ["src"]
-include = ["llama_stack", "llama_stack.*"]
+include = [
+    "llama_stack",
+    "llama_stack.*",
+    "llama_stack_api",
+    "llama_stack_api.*",
+]
 
 [[tool.uv.index]]
 name = "pytorch-cpu"
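The include patterns above are glob-style and matched against dotted package names rather than file paths, so the root package and its subpackages need separate entries: "llama_stack" matches only the top-level package, while "llama_stack.*" matches llama_stack.core, llama_stack.cli, and so on. A minimal sketch of src-layout discovery with a hypothetical package:

    [tool.setuptools.packages.find]
    where = ["src"]                  # search the src/ directory (src-layout)
    include = ["mypkg", "mypkg.*"]   # root package plus all of its subpackages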
@@ -191,6 +198,7 @@ explicit = true
 [tool.uv.sources]
 torch = [{ index = "pytorch-cpu" }]
 torchvision = [{ index = "pytorch-cpu" }]
+llama-stack-api = [{ path = "src/llama_stack_api", editable = true }]
 
 [tool.ruff]
 line-length = 120
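Two uv mechanisms combine here: an index marked explicit = true is consulted only for packages pinned to it under [tool.uv.sources], and a path source with editable = true installs a local directory in editable mode instead of resolving it from any index. A sketch of the pattern; the pytorch-cpu URL and the my-api package are assumptions, not shown in this hunk:

    [[tool.uv.index]]
    name = "pytorch-cpu"
    url = "https://download.pytorch.org/whl/cpu"   # assumed URL, for illustration only
    explicit = true    # ignored unless a package opts in below

    [tool.uv.sources]
    torch = [{ index = "pytorch-cpu" }]                  # resolve torch from this index only
    my-api = [{ path = "src/my_api", editable = true }]  # hypothetical local editable package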
@@ -247,7 +255,9 @@ unfixable = [
 # Ignore the following errors for the following files
 [tool.ruff.lint.per-file-ignores]
 "tests/**/*.py" = ["DTZ"] # Ignore datetime rules for tests
-"src/llama_stack/providers/inline/scoring/basic/utils/ifeval_utils.py" = ["RUF001"]
+"src/llama_stack/providers/inline/scoring/basic/utils/ifeval_utils.py" = [
+    "RUF001",
+]
 "src/llama_stack/providers/inline/scoring/basic/scoring_fn/fn_defs/regex_parser_multiple_choice_answer.py" = [
     "RUF001",
     "PLE2515",
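In [tool.ruff.lint.per-file-ignores], each key is a file glob relative to the project root and each value lists rule codes or whole rule-family prefixes; "DTZ" above silences every flake8-datetimez rule in tests, and RUF001 flags ambiguous unicode characters in strings. A minimal sketch with a hypothetical path:

    [tool.ruff.lint.per-file-ignores]
    "tests/**/*.py" = ["DTZ"]      # a prefix disables a whole rule family
    "src/mypkg/generated.py" = [   # multi-line form, as in the change above
        "RUF001",                  # ambiguous-unicode-character-string
    ]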
@@ -258,7 +268,7 @@ unfixable = [
 
 [tool.mypy]
 mypy_path = ["src"]
-packages = ["llama_stack"]
+packages = ["llama_stack", "llama_stack_api"]
 plugins = ['pydantic.mypy']
 disable_error_code = []
 warn_return_any = true
@@ -283,12 +293,13 @@ exclude = [
     "^src/llama_stack/models/llama/llama3/interface\\.py$",
     "^src/llama_stack/models/llama/llama3/tokenizer\\.py$",
     "^src/llama_stack/models/llama/llama3/tool_utils\\.py$",
-    "^src/llama_stack/providers/inline/datasetio/localfs/",
-    "^src/llama_stack/providers/inline/eval/meta_reference/eval\\.py$",
-    "^src/llama_stack/providers/inline/inference/meta_reference/inference\\.py$",
     "^src/llama_stack/models/llama/llama3/generation\\.py$",
     "^src/llama_stack/models/llama/llama3/multimodal/model\\.py$",
     "^src/llama_stack/models/llama/llama4/",
     "^src/llama_stack/providers/inline/agents/meta_reference/",
+    "^src/llama_stack/providers/inline/datasetio/localfs/",
+    "^src/llama_stack/providers/inline/eval/meta_reference/eval\\.py$",
+    "^src/llama_stack/providers/inline/inference/meta_reference/inference\\.py$",
+    "^src/llama_stack/providers/inline/inference/sentence_transformers/sentence_transformers\\.py$",
     "^src/llama_stack/providers/inline/post_training/common/validator\\.py$",
     "^src/llama_stack/providers/inline/safety/code_scanner/",
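The exclude entries in this and the following hunks are Python regular expressions matched against file paths, which is why literal dots are escaped; inside a TOML basic string the backslash itself must be doubled, so "\\.py$" reaches mypy as the regex \.py$. A short sketch with a hypothetical package:

    [tool.mypy]
    exclude = [
        "^src/mypkg/legacy/",           # everything under this directory
        "^src/mypkg/old_module\\.py$",  # exactly one file; \\. is a literal dot
    ]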
@@ -299,6 +310,7 @@ exclude = [
     "^src/llama_stack/providers/remote/agents/sample/",
     "^src/llama_stack/providers/remote/datasetio/huggingface/",
     "^src/llama_stack/providers/remote/datasetio/nvidia/",
+    "^src/llama_stack/providers/remote/inference/oci/",
     "^src/llama_stack/providers/remote/inference/bedrock/",
     "^src/llama_stack/providers/remote/inference/nvidia/",
     "^src/llama_stack/providers/remote/inference/passthrough/",
@@ -338,7 +350,6 @@ exclude = [
     "^src/llama_stack/providers/utils/telemetry/dataset_mixin\\.py$",
     "^src/llama_stack/providers/utils/telemetry/trace_protocol\\.py$",
     "^src/llama_stack/providers/utils/telemetry/tracing\\.py$",
-    "^src/llama_stack/strong_typing/auxiliary\\.py$",
     "^src/llama_stack/distributions/template\\.py$",
 ]
 
@@ -347,6 +358,10 @@ exclude = [
 module = [
     "yaml",
     "fire",
+    "redis.asyncio",
+    "psycopg2",
+    "psycopg2.extras",
+    "psycopg2.extensions",
     "torchtune.*",
     "fairscale.*",
     "torchvision.*",
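This module list belongs to a [[tool.mypy.overrides]] block: each entry names a module (with .* covering all submodules) whose settings are overridden, typically to silence errors about missing type stubs for untyped third-party packages. A sketch of the usual shape; the flags actually set in this block fall outside the hunk, so ignore_missing_imports is an assumption:

    [[tool.mypy.overrides]]
    module = [
        "yaml",        # a single module
        "psycopg2.*",  # a package and all of its submodules
    ]
    ignore_missing_imports = true  # assumed flag, not visible in this hunk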