feat: refactor llama-stack-api structure

Move `llama_stack_api.apis.*` to the top-level `llama_stack_api.*` namespace.

Merge the provider datatypes and the existing `apis.datatypes` into a common `llama_stack_api.datatypes` module.

Update all usages of these packages throughout LLS.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
Charlie Doern 2025-11-12 15:59:34 -05:00
parent d6b915ce0a
commit b7480e9c88
296 changed files with 906 additions and 1109 deletions

View file

@@ -8,8 +8,8 @@ import time
from io import BytesIO
import pytest
-from llama_stack_api.apis.files import ExpiresAfter
-from llama_stack_api.apis.vector_io import Chunk
+from llama_stack_api.files import ExpiresAfter
+from llama_stack_api.vector_io import Chunk
from llama_stack_client import BadRequestError
from openai import BadRequestError as OpenAIBadRequestError
@@ -646,7 +646,7 @@ def test_openai_vector_store_attach_file(
):
"""Test OpenAI vector store attach file."""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
compat_client = compat_client_with_empty_stores
@@ -710,7 +710,7 @@ def test_openai_vector_store_attach_files_on_creation(
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
compat_client = compat_client_with_empty_stores
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
# Create some files and attach them to the vector store
valid_file_ids = []
@@ -775,7 +775,7 @@ def test_openai_vector_store_list_files(
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
compat_client = compat_client_with_empty_stores
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
# Create a vector store
vector_store = compat_client.vector_stores.create(
@@ -867,7 +867,7 @@ def test_openai_vector_store_retrieve_file_contents(
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
compat_client = compat_client_with_empty_stores
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
# Create a vector store
vector_store = compat_client.vector_stores.create(
@@ -928,7 +928,7 @@ def test_openai_vector_store_delete_file(
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
compat_client = compat_client_with_empty_stores
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
# Create a vector store
vector_store = compat_client.vector_stores.create(
@@ -994,7 +994,7 @@ def test_openai_vector_store_delete_file_removes_from_vector_store(
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
compat_client = compat_client_with_empty_stores
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
# Create a vector store
vector_store = compat_client.vector_stores.create(
@@ -1046,7 +1046,7 @@ def test_openai_vector_store_update_file(
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
compat_client = compat_client_with_empty_stores
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
# Create a vector store
vector_store = compat_client.vector_stores.create(
@@ -1103,7 +1103,7 @@ def test_create_vector_store_files_duplicate_vector_store_name(
This test confirms that client.vector_stores.create() creates a unique ID
"""
skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
-from llama_stack_api.apis.files import ExpiresAfter
+from llama_stack_api.files import ExpiresAfter
compat_client = compat_client_with_empty_stores

View file

@@ -5,7 +5,7 @@
# the root directory of this source tree.
import pytest
-from llama_stack_api.apis.vector_io import Chunk
+from llama_stack_api.vector_io import Chunk
from ..conftest import vector_provider_wrapper