Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-04 10:10:36 +00:00)
feat: split API and provider specs into separate llama-stack-api pkg
Extract API definitions, models, and provider specifications into a standalone llama-stack-api package that can be published to PyPI independently of the main llama-stack server.

Motivation

External providers currently import from llama-stack, which overrides the installed version and causes dependency conflicts. This separation allows external providers to:

- Install only the type definitions they need without server dependencies
- Avoid version conflicts with the installed llama-stack package
- Be versioned and released independently

This enables us to re-enable external provider module tests that were previously blocked by these import conflicts.

Changes

- Created llama-stack-api package with minimal dependencies (pydantic, jsonschema)
- Moved APIs, providers datatypes, strong_typing, and schema_utils
- Updated all imports from llama_stack.* to llama_stack_api.*
- Preserved git history using git mv for moved files
- Configured local editable install for development workflow
- Updated linting and type-checking configuration for both packages
- Rebased on top of upstream src/ layout changes

Testing

Package builds successfully and can be imported independently. All pre-commit hooks pass with expected exclusions maintained.

Next Steps

- Publish llama-stack-api to PyPI
- Update external provider dependencies
- Re-enable external provider module tests

Signed-off-by: Charlie Doern <cdoern@redhat.com>
Parent: e5a55f3677
Commit: 85d407c2a0
359 changed files with 1259 additions and 980 deletions
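For an external provider, the migration this commit enables is a mechanical rename of the import root: the same type definitions are supplied by the standalone llama-stack-api package rather than the full llama-stack server. Below is a minimal sketch using only names that appear in the diff that follows; note that for now the package is installed as a local editable install, since publishing to PyPI is listed as a next step.

# Before: type definitions were imported from the full llama-stack server
# package, which pulls in server dependencies and can conflict with the
# installed llama-stack version.
# from llama_stack.apis.common.errors import ResourceNotFoundError
# from llama_stack.apis.files import ExpiresAfter, OpenAIFilePurpose

# After: the same names come from the lightweight llama-stack-api package,
# whose only runtime dependencies are pydantic and jsonschema.
from llama_stack_api.apis.common.errors import ResourceNotFoundError
from llama_stack_api.apis.files import ExpiresAfter, OpenAIFilePurpose

The test changes below show the same rename applied to the S3 files provider tests.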
@@ -8,9 +8,8 @@ from unittest.mock import patch

 import pytest
 from botocore.exceptions import ClientError

-from llama_stack.apis.common.errors import ResourceNotFoundError
-from llama_stack.apis.files import OpenAIFilePurpose
+from llama_stack_api.apis.common.errors import ResourceNotFoundError
+from llama_stack_api.apis.files import OpenAIFilePurpose


 class TestS3FilesImpl:
@@ -228,7 +227,7 @@ class TestS3FilesImpl:
         mock_now.return_value = 0

-        from llama_stack.apis.files import ExpiresAfter
+        from llama_stack_api.apis.files import ExpiresAfter

         sample_text_file.filename = "test_expired_file"
         uploaded = await s3_provider.openai_upload_file(
@@ -260,7 +259,7 @@ class TestS3FilesImpl:
     async def test_unsupported_expires_after_anchor(self, s3_provider, sample_text_file):
         """Unsupported anchor value should raise ValueError."""
-        from llama_stack.apis.files import ExpiresAfter
+        from llama_stack_api.apis.files import ExpiresAfter

         sample_text_file.filename = "test_unsupported_expires_after_anchor"
@@ -273,7 +272,7 @@ class TestS3FilesImpl:
     async def test_nonint_expires_after_seconds(self, s3_provider, sample_text_file):
         """Non-integer seconds in expires_after should raise ValueError."""
-        from llama_stack.apis.files import ExpiresAfter
+        from llama_stack_api.apis.files import ExpiresAfter

         sample_text_file.filename = "test_nonint_expires_after_seconds"
@@ -286,7 +285,7 @@ class TestS3FilesImpl:
     async def test_expires_after_seconds_out_of_bounds(self, s3_provider, sample_text_file):
         """Seconds outside allowed range should raise ValueError."""
-        from llama_stack.apis.files import ExpiresAfter
+        from llama_stack_api.apis.files import ExpiresAfter

         with pytest.raises(ValueError, match="greater than or equal to 3600"):
             await s3_provider.openai_upload_file(