Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-03 09:53:45 +00:00
feat: split API and provider specs into separate llama-stack-api pkg
Extract API definitions, models, and provider specifications into a standalone llama-stack-api package that can be published to PyPI independently of the main llama-stack server.

Motivation

External providers currently import from llama-stack, which overrides the installed version and causes dependency conflicts. This separation allows external providers to:
- Install only the type definitions they need without server dependencies
- Avoid version conflicts with the installed llama-stack package
- Be versioned and released independently

This enables us to re-enable external provider module tests that were previously blocked by these import conflicts.

Changes

- Created llama-stack-api package with minimal dependencies (pydantic, jsonschema)
- Moved APIs, providers datatypes, strong_typing, and schema_utils
- Updated all imports from llama_stack.* to llama_stack_api.*
- Preserved git history using git mv for moved files
- Configured local editable install for development workflow
- Updated linting and type-checking configuration for both packages
- Rebased on top of upstream src/ layout changes

Testing

Package builds successfully and can be imported independently. All pre-commit hooks pass with expected exclusions maintained.

Next Steps

- Publish llama-stack-api to PyPI
- Update external provider dependencies
- Re-enable external provider module tests

Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
parent e5a55f3677
commit 85d407c2a0

359 changed files with 1259 additions and 980 deletions
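The bulk of the diff is a mechanical rename of import prefixes. As a rough, hedged sketch of that part of the change (this is not the migration tooling used in the commit, which relied on git mv plus packaging updates; the prefix list and the src directory are assumptions for illustration):

import re
from pathlib import Path

# Sub-packages assumed to have moved into llama-stack-api; the exact list is
# an illustration, not taken from the commit.
MOVED_PREFIXES = [
    "llama_stack.apis",
    "llama_stack.providers.datatypes",
    "llama_stack.schema_utils",
]


def rewrite_imports(root: str) -> None:
    """Rewrite moved llama_stack.* module paths to llama_stack_api.* (sketch)."""
    for path in Path(root).rglob("*.py"):
        text = path.read_text()
        new_text = text
        for prefix in MOVED_PREFIXES:
            target = prefix.replace("llama_stack.", "llama_stack_api.", 1)
            # \b keeps the match anchored to whole module-path components.
            new_text = re.sub(rf"\b{re.escape(prefix)}\b", target, new_text)
        if new_text != text:
            path.write_text(new_text)


if __name__ == "__main__":
    rewrite_imports("src")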
@@ -14,8 +14,8 @@ import os
 from pathlib import Path
 
 import fire
 
-from llama_stack.apis.common.errors import ModelNotFoundError
+from llama_stack_api.apis.common.errors import ModelNotFoundError
 from llama_stack.models.llama.llama3.generation import Llama3
 from llama_stack.models.llama.llama4.generation import Llama4
 from llama_stack.models.llama.sku_list import resolve_model
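Only the error import changes package in this hunk: exception types such as ModelNotFoundError now come from the standalone llama-stack-api package, while the Llama model implementations stay in llama-stack. A hedged sketch of how the two typically combine (the helper function and the constructor argument are assumptions, not code from this file):

from llama_stack.models.llama.sku_list import resolve_model
from llama_stack_api.apis.common.errors import ModelNotFoundError


def require_model(model_id: str):
    """Resolve a model descriptor, raising the API-level error if unknown (sketch)."""
    model = resolve_model(model_id)
    if model is None:
        # Assumption: ModelNotFoundError accepts the model identifier.
        raise ModelNotFoundError(model_id)
    return model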
@@ -22,7 +22,7 @@ def get_api_docstring(api_name: str) -> str | None:
     """Extract docstring from the API protocol class."""
     try:
         # Import the API module dynamically
-        api_module = __import__(f"llama_stack.apis.{api_name}", fromlist=[api_name.title()])
+        api_module = __import__(f"llama_stack_api.apis.{api_name}", fromlist=[api_name.title()])
 
         # Get the main protocol class (usually capitalized API name)
         protocol_class_name = api_name.title()
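One detail worth noting in this hunk: the non-empty fromlist is what makes __import__ return the submodule itself rather than the top-level llama_stack_api package. A small illustration using the inference API as the example (importlib.import_module is the usually preferred equivalent):

import importlib

# With no fromlist, __import__("llama_stack_api.apis.inference") would return
# the top-level llama_stack_api package, not the inference submodule.
module = __import__("llama_stack_api.apis.inference", fromlist=["Inference"])

# Equivalent and generally clearer:
module_alt = importlib.import_module("llama_stack_api.apis.inference")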
@@ -83,8 +83,9 @@ def get_config_class_info(config_class_path: str) -> dict[str, Any]:
        # this string replace is ridiculous
        field_type = field_type.replace("typing.", "").replace("Optional[", "").replace("]", "")
        field_type = field_type.replace("Annotated[", "").replace("FieldInfo(", "").replace(")", "")
-       field_type = field_type.replace("llama_stack.apis.inference.inference.", "")
-       field_type = field_type.replace("llama_stack.providers.", "")
+       field_type = field_type.replace("llama_stack_api.apis.inference.inference.", "")
+       field_type = field_type.replace("llama_stack_api.providers.", "")
 
        default_value = field.default
        if field.default_factory is not None:
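The chain of str.replace calls above strips wrappers and module prefixes from stringified Pydantic field annotations before they are rendered. A hedged sketch of the same cleanup written as a loop over prefixes (the example annotation string is made up for illustration):

# Wrappers and module prefixes to strip from stringified annotations; the
# llama_stack_api entries mirror the package name introduced by this commit.
_WRAPPERS = ("Optional[", "Annotated[", "FieldInfo(", "]", ")")
_PREFIXES = (
    "typing.",
    "llama_stack_api.apis.inference.inference.",
    "llama_stack_api.providers.",
)


def clean_field_type(field_type: str) -> str:
    """Reduce a stringified annotation to a short, readable type name (sketch)."""
    for token in _WRAPPERS:
        field_type = field_type.replace(token, "")
    for prefix in _PREFIXES:
        field_type = field_type.replace(prefix, "")
    return field_type


# Made-up example:
# clean_field_type("typing.Optional[llama_stack_api.apis.inference.inference.SamplingParams]")
# returns "SamplingParams"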