mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-03 18:00:36 +00:00
feat: split API and provider specs into separate llama-stack-api pkg (#3895)
# What does this PR do? Extract API definitions and provider specifications into a standalone llama-stack-api package that can be published to PyPI independently of the main llama-stack server. See: https://github.com/llamastack/llama-stack/pull/2978 and https://github.com/llamastack/llama-stack/pull/2978#issuecomment-3145115942 Motivation External providers currently import from llama-stack, which overrides the installed version and causes dependency conflicts. This separation allows external providers to: - Install only the type definitions they need without server dependencies - Avoid version conflicts with the installed llama-stack package - Be versioned and released independently This enables us to re-enable external provider module tests that were previously blocked by these import conflicts. Changes - Created llama-stack-api package with minimal dependencies (pydantic, jsonschema) - Moved APIs, providers datatypes, strong_typing, and schema_utils - Updated all imports from llama_stack.* to llama_stack_api.* - Configured local editable install for development workflow - Updated linting and type-checking configuration for both packages Next Steps - Publish llama-stack-api to PyPI - Update external provider dependencies - Re-enable external provider module tests Precursor PRs to this one: - #4093 - #3954 - #4064 These PRs moved key pieces _out_ of the API package, limiting the scope of change here. Relates to #3237 ## Test Plan Package builds successfully and can be imported independently. All pre-commit hooks pass with expected exclusions maintained. --------- Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
parent
ceb716b9a0
commit
840ad75fe9
358 changed files with 2337 additions and 1424 deletions
82
src/llama-stack-api/pyproject.toml
Normal file
82
src/llama-stack-api/pyproject.toml
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[tool.uv]
required-version = ">=0.7.0"

[project]
name = "llama-stack-api"
version = "0.1.0"
authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
description = "API and Provider specifications for Llama Stack - lightweight package with protocol definitions and provider specs"
readme = "README.md"
requires-python = ">=3.12"
license = { text = "MIT" }
classifiers = [
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Operating System :: OS Independent",
    "Intended Audience :: Developers",
    "Intended Audience :: Information Technology",
    "Intended Audience :: Science/Research",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Topic :: Scientific/Engineering :: Information Analysis",
]
dependencies = [
    "jsonschema",
    # NOTE(review): the opentelemetry packages go beyond the "minimal
    # dependencies (pydantic, jsonschema)" stated in the PR description —
    # confirm they are actually required by this spec-only package.
    "opentelemetry-exporter-otlp-proto-http>=1.30.0",
    "opentelemetry-sdk>=1.30.0",
    "pydantic>=2.11.9",
]

[project.urls]
Homepage = "https://github.com/llamastack/llama-stack"

[tool.setuptools.packages.find]
where = ["."]
include = ["llama_stack_api", "llama_stack_api.*"]

# Ship the py.typed marker so type checkers treat the package as typed (PEP 561).
[tool.setuptools.package-data]
llama_stack_api = ["py.typed"]

[tool.ruff]
line-length = 120

[tool.ruff.lint]
select = [
    "UP",      # pyupgrade
    "B",       # flake8-bugbear
    "B9",      # flake8-bugbear subset
    "C",       # comprehensions
    "E",       # pycodestyle
    "F",       # Pyflakes
    "N",       # Naming
    "W",       # Warnings
    "DTZ",     # datetime rules
    "I",       # isort (imports order)
    "RUF001",  # Checks for ambiguous Unicode characters in strings
    "RUF002",  # Checks for ambiguous Unicode characters in docstrings
    "RUF003",  # Checks for ambiguous Unicode characters in comments
    "PLC2401", # Checks for the use of non-ASCII characters in variable names
]
ignore = [
    # The following ignores are desired by the project maintainers.
    "E402", # Module level import not at top of file
    "E501", # Line too long
    "F405", # Maybe undefined or defined from star import
    "C408", # Ignored because we like the dict keyword argument syntax
    "N812", # Ignored because import torch.nn.functional as F is PyTorch convention

    # These are the additional ones we started ignoring after moving to ruff. We should look into each one of them later.
    "C901", # Complexity of the function is too high
]
# NOTE(review): "PLE2515" is marked unfixable but does not appear in `select`
# above, so the rule never runs here — confirm whether it should be selected
# (this config looks copied from the main llama-stack package).
unfixable = [
    "PLE2515",
] # Do not fix this automatically since ruff will replace the zero-width space with \u200b - let's do it manually

[tool.ruff.lint.per-file-ignores]
# API __init__ modules re-export their contents via star imports, so F403
# (undefined names from star import) is expected there.
"llama_stack_api/apis/**/__init__.py" = ["F403"]

[tool.ruff.lint.pep8-naming]
# Treat pydantic's @field_validator like @classmethod for the N80x naming checks.
classmethod-decorators = ["classmethod", "pydantic.field_validator"]
Loading…
Add table
Add a link
Reference in a new issue