refactor: enforce top-level imports for llama-stack-api

Enforce that all imports from llama-stack-api use the form:

from llama_stack_api import <symbol>

This prevents external code from accessing internal package structure
(e.g., llama_stack_api.agents, llama_stack_api.common.*) and establishes
a clear public API boundary.

Changes:
- Export 400+ symbols from llama_stack_api/__init__.py
- Include all API types, common utilities, and strong_typing helpers
- Update files across src/llama_stack, docs/, tests/, scripts/
- Convert all submodule imports to top-level imports
- Ensure docs use the proper import structure

Addresses PR review feedback requiring explicit __all__ definition to
prevent "peeking inside" the API package.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
Charlie Doern 2025-11-13 14:03:30 -05:00
parent b7480e9c88
commit 2e5d1c8881
270 changed files with 1587 additions and 750 deletions

View file

@ -58,7 +58,7 @@ External APIs must expose a `available_providers()` function in their module tha
```python
# llama_stack_api_weather/api.py
from llama_stack_api.providers.datatypes import Api, InlineProviderSpec, ProviderSpec
from llama_stack_api import Api, InlineProviderSpec, ProviderSpec
def available_providers() -> list[ProviderSpec]:
@ -79,7 +79,7 @@ A Protocol class like so:
# llama_stack_api_weather/api.py
from typing import Protocol
from llama_stack_api.schema_utils import webmethod
from llama_stack_api import webmethod
class WeatherAPI(Protocol):
@ -151,13 +151,12 @@ __all__ = ["WeatherAPI", "available_providers"]
# llama-stack-api-weather/src/llama_stack_api_weather/weather.py
from typing import Protocol
from llama_stack_api.providers.datatypes import (
from llama_stack_api import (
Api,
ProviderSpec,
RemoteProviderSpec,
webmethod,
)
from llama_stack_api.schema_utils import webmethod
def available_providers() -> list[ProviderSpec]:
return [

View file

@ -153,7 +153,7 @@ description: |
Example using RAGQueryConfig with different search modes:
```python
from llama_stack_api.rag_tool import RAGQueryConfig, RRFRanker, WeightedRanker
from llama_stack_api import RAGQueryConfig, RRFRanker, WeightedRanker
# Vector search
config = RAGQueryConfig(mode="vector", max_chunks=5)
@ -358,7 +358,7 @@ Two ranker types are supported:
Example using RAGQueryConfig with different search modes:
```python
from llama_stack_api.rag_tool import RAGQueryConfig, RRFRanker, WeightedRanker
from llama_stack_api import RAGQueryConfig, RRFRanker, WeightedRanker
# Vector search
config = RAGQueryConfig(mode="vector", max_chunks=5)

View file

@ -16,7 +16,7 @@ import sys
import fire
import ruamel.yaml as yaml
from llama_stack_api.version import LLAMA_STACK_API_V1 # noqa: E402
from llama_stack_api import LLAMA_STACK_API_V1 # noqa: E402
from llama_stack.core.stack import LlamaStack # noqa: E402
from .pyopenapi.options import Options # noqa: E402

View file

@ -16,27 +16,27 @@ from typing import Annotated, Any, Dict, get_args, get_origin, Set, Union
from fastapi import UploadFile
from llama_stack_api.datatypes import Error
from llama_stack_api.strong_typing.core import JsonType
from llama_stack_api.strong_typing.docstring import Docstring, parse_type
from llama_stack_api.strong_typing.inspection import (
from llama_stack_api import (
Docstring,
Error,
JsonSchemaGenerator,
JsonType,
Schema,
SchemaOptions,
get_schema_identifier,
is_generic_list,
is_type_optional,
is_type_union,
is_unwrapped_body_param,
json_dump_string,
object_to_json,
parse_type,
python_type_to_name,
register_schema,
unwrap_generic_list,
unwrap_optional_type,
unwrap_union_types,
)
from llama_stack_api.strong_typing.name import python_type_to_name
from llama_stack_api.strong_typing.schema import (
get_schema_identifier,
JsonSchemaGenerator,
register_schema,
Schema,
SchemaOptions,
)
from llama_stack_api.strong_typing.serialization import json_dump_string, object_to_json
from pydantic import BaseModel
from .operations import (

View file

@ -11,19 +11,21 @@ import typing
from dataclasses import dataclass
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, Union
from llama_stack_api.version import LLAMA_STACK_API_V1, LLAMA_STACK_API_V1BETA, LLAMA_STACK_API_V1ALPHA
from termcolor import colored
from llama_stack_api.strong_typing.inspection import get_signature
from typing import get_origin, get_args
from fastapi import UploadFile
from fastapi.params import File, Form
from typing import Annotated
from llama_stack_api.schema_utils import ExtraBodyField
from llama_stack_api import (
ExtraBodyField,
LLAMA_STACK_API_V1,
LLAMA_STACK_API_V1ALPHA,
LLAMA_STACK_API_V1BETA,
get_signature,
)
def split_prefix(

View file

@ -9,7 +9,7 @@ import enum
from dataclasses import dataclass
from typing import Any, ClassVar, Dict, List, Optional, Union
from llama_stack_api.strong_typing.schema import JsonType, Schema, StrictJsonType
from llama_stack_api import JsonType, Schema, StrictJsonType
URL = str

View file

@ -11,8 +11,7 @@ from pathlib import Path
from typing import Any, List, Optional, TextIO, Union, get_type_hints, get_origin, get_args
from pydantic import BaseModel
from llama_stack_api.strong_typing.schema import object_to_json, StrictJsonType
from llama_stack_api.strong_typing.inspection import is_unwrapped_body_param
from llama_stack_api import StrictJsonType, is_unwrapped_body_param, object_to_json
from llama_stack.core.resolver import api_protocol_map
from .generator import Generator
@ -165,12 +164,12 @@ def _validate_api_delete_method_returns_none(method) -> str | None:
return "has no return type annotation"
return_type = hints['return']
# Allow OpenAI endpoints to return response objects since they follow OpenAI specification
method_name = getattr(method, '__name__', '')
if method_name.__contains__('openai_'):
return None
if return_type is not None and return_type is not type(None):
return "does not return None where None is mandatory"