Signed-off-by: Sébastien Han <seb@redhat.com>
Sébastien Han 2025-10-30 17:56:42 +01:00
parent ec702ac3fb
commit 38de8ea1f7
12 changed files with 26571 additions and 24377 deletions


@@ -110,11 +110,20 @@ repos:
name: API Spec Codegen
additional_dependencies:
- uv==0.7.8
-entry: sh -c './scripts/uv-run-with-index.sh run scripts/fastapi_generator.py docs/static'
+entry: sh -c './scripts/uv-run-with-index.sh run scripts/run_openapi_generator.sh'
language: python
pass_filenames: false
require_serial: true
files: ^src/llama_stack/apis/
- id: openapi-validate
name: OpenAPI Schema Validation
additional_dependencies:
- uv==0.7.8
entry: uv run scripts/validate_openapi.py docs/static/ --quiet
language: python
pass_filenames: false
require_serial: true
files: ^docs/static/.*\.ya?ml$
- id: check-workflows-use-hashes
name: Check GitHub Actions use SHA-pinned actions
entry: ./scripts/check-workflows-use-hashes.sh

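For orientation, the new openapi-validate hook boils down to loading each spec under docs/static/ and handing it to openapi-spec-validator. A minimal standalone sketch of that check (the spec path is just an example, not the hook's actual implementation):

# Minimal sketch of the check performed by the openapi-validate hook (illustrative only).
import sys
from pathlib import Path

import yaml
from openapi_spec_validator import validate_spec


def check(spec_path: Path) -> int:
    # Load the YAML spec and validate it; a non-zero exit mirrors a failing hook.
    spec = yaml.safe_load(spec_path.read_text())
    try:
        validate_spec(spec)
    except Exception as exc:
        print(f"{spec_path}: {exc}")
        return 1
    return 0


if __name__ == "__main__":
    sys.exit(check(Path("docs/static/llama-stack-spec.yaml")))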
File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -75,6 +75,7 @@ dev = [
"mypy",
"pre-commit",
"ruamel.yaml", # needed for openapi generator
"openapi-spec-validator>=0.7.2",
]
# Type checking dependencies - includes type stubs and optional runtime dependencies
# needed for complete mypy coverage across all optional features


@@ -17,6 +17,8 @@ from typing import Annotated, Any, Literal, get_args, get_origin
import yaml
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi
from openapi_spec_validator import validate_spec
from openapi_spec_validator.exceptions import OpenAPISpecValidatorError
from llama_stack.apis.datatypes import Api
from llama_stack.core.resolver import api_protocol_map
@@ -24,6 +26,9 @@ from llama_stack.core.resolver import api_protocol_map
# Import the existing route discovery system
from llama_stack.core.server.routes import get_all_api_routes
# Global list to store dynamic models created during endpoint generation
_dynamic_models = []
def _get_all_api_routes_with_functions():
"""
@@ -108,6 +113,37 @@ def create_llama_stack_app() -> FastAPI:
return app
def _extract_path_parameters(path: str) -> list[dict[str, Any]]:
"""
Extract path parameters from a URL path and return them as OpenAPI parameter definitions.
Args:
path: URL path with parameters like /v1/batches/{batch_id}/cancel
Returns:
List of parameter definitions for OpenAPI
"""
import re
# Find all path parameters in the format {param} or {param:type}
param_pattern = r"\{([^}:]+)(?::[^}]+)?\}"
matches = re.findall(param_pattern, path)
parameters = []
for param_name in matches:
parameters.append(
{
"name": param_name,
"in": "path",
"required": True,
"schema": {"type": "string"},
"description": f"Path parameter: {param_name}",
}
)
return parameters
def _create_fastapi_endpoint(app: FastAPI, route, webmethod):
"""
Create a FastAPI endpoint from a discovered route and webmethod.
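As a quick illustration of the _extract_path_parameters pattern added above (a standalone snippet, not a call into the generator; the second path is a made-up example of the {param:type} form):

# Standalone illustration of the {param} / {param:type} extraction regex used above.
import re

param_pattern = r"\{([^}:]+)(?::[^}]+)?\}"
print(re.findall(param_pattern, "/v1/batches/{batch_id}/cancel"))  # ['batch_id']
print(re.findall(param_pattern, "/v1/files/{file_id:path}"))       # ['file_id'] (hypothetical route)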
@@ -124,6 +160,12 @@ def _create_fastapi_endpoint(app: FastAPI, route, webmethod):
# Try to find actual models for this endpoint
request_model, response_model, query_parameters = _find_models_for_endpoint(webmethod)
# Debug: Print info for safety endpoints
if "safety" in webmethod.route or "shield" in webmethod.route:
print(
f"Debug: {webmethod.route} - request_model: {request_model}, response_model: {response_model}, query_parameters: {query_parameters}"
)
# Extract response description from webmethod docstring (always try this first)
response_description = _extract_response_description_from_docstring(webmethod, response_model)
@@ -136,8 +178,56 @@ def _create_fastapi_endpoint(app: FastAPI, route, webmethod):
endpoint_func = typed_endpoint
elif response_model and query_parameters:
-# Request with individual parameters (could be GET with query params or POST with individual params)
-# Create a function with the actual query parameters
+# Check if this is a POST/PUT endpoint with individual parameters
+# For POST/PUT, individual parameters should go in request body, not query params
is_post_put = any(method.upper() in ["POST", "PUT", "PATCH"] for method in methods)
if is_post_put:
# POST/PUT with individual parameters - create a request body model
try:
from pydantic import create_model
# Create a dynamic Pydantic model for the request body
field_definitions = {}
for param_name, param_type, default_value in query_parameters:
# Handle complex types that might cause issues with create_model
safe_type = _make_type_safe_for_fastapi(param_type)
if default_value is None:
field_definitions[param_name] = (safe_type, ...) # Required field
else:
field_definitions[param_name] = (safe_type, default_value) # Optional field with default
# Create the request model dynamically
# Clean up the route name to create a valid schema name
clean_route = webmethod.route.replace("/", "_").replace("{", "").replace("}", "").replace("-", "_")
model_name = f"{clean_route}_Request"
print(f"Debug: Creating model {model_name} with fields: {field_definitions}")
request_model = create_model(model_name, **field_definitions)
print(f"Debug: Successfully created model {model_name}")
# Store the dynamic model in the global list for schema inclusion
_dynamic_models.append(request_model)
# Create endpoint with request body
async def typed_endpoint(request: request_model) -> response_model:
"""Typed endpoint for proper schema generation."""
return response_model()
# Set the function signature to ensure FastAPI recognizes the request model
typed_endpoint.__annotations__ = {"request": request_model, "return": response_model}
endpoint_func = typed_endpoint
except Exception as e:
# If dynamic model creation fails, fall back to query parameters
print(f"Warning: Failed to create dynamic request model for {webmethod.route}: {e}")
print(f" Query parameters: {query_parameters}")
# Fall through to the query parameter handling
pass
if not is_post_put:
# GET with query parameters - create a function with the actual query parameters
def create_query_endpoint_func():
# Build the function signature dynamically
import inspect
@@ -153,28 +243,41 @@ def _create_fastapi_endpoint(app: FastAPI, route, webmethod):
if default_value is not None:
param_defaults[param_name] = default_value
# Create the function with the correct signature
def create_endpoint_func():
# Sort parameters so that required parameters come before optional ones
# Parameters with None default are required, others are optional
sorted_params = sorted(
query_parameters,
key=lambda x: (x[2] is not None, x[0]), # False (required) comes before True (optional)
)
# Create the function signature
sig = inspect.Signature(
[
inspect.Parameter(
name=param_name,
kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
-default=default_value,
+default=default_value if default_value is not None else inspect.Parameter.empty,
annotation=param_annotations[param_name],
)
-for param_name, param_type, default_value in query_parameters
+for param_name, param_type, default_value in sorted_params
]
)
-async def query_endpoint(**kwargs) -> response_model:
+# Create a simple function without **kwargs
async def query_endpoint():
"""Query endpoint for proper schema generation.""" """Query endpoint for proper schema generation."""
return response_model() return response_model()
# Set the signature # Set the signature and annotations
query_endpoint.__signature__ = sig query_endpoint.__signature__ = sig
query_endpoint.__annotations__ = param_annotations query_endpoint.__annotations__ = param_annotations
return query_endpoint return query_endpoint
return create_endpoint_func()
endpoint_func = create_query_endpoint_func()
elif response_model:
# Response-only endpoint (no parameters)
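The POST/PUT branch added above relies on pydantic's create_model to synthesize a request-body model from individual parameters. A minimal sketch of that pattern, with hypothetical field names (not taken from an actual endpoint):

# Sketch of dynamic request-model creation with pydantic's create_model (hypothetical fields).
from pydantic import create_model

field_definitions = {
    "name": (str, ...),    # (type, ...) marks a required field
    "timeout": (int, 30),  # (type, default) marks an optional field with a default
}
ExampleRequest = create_model("ExampleRequest", **field_definitions)
print(ExampleRequest(name="demo").model_dump())  # {'name': 'demo', 'timeout': 30}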
@@ -289,6 +392,10 @@ def _find_models_for_endpoint(webmethod) -> tuple[type | None, type | None, lis
if param_name == "self":
continue
# Skip *args and **kwargs parameters - these are not real API parameters
if param.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
continue
# Check if it's a Pydantic model (for POST/PUT requests)
param_type = param.annotation
if hasattr(param_type, "model_json_schema"):
@@ -319,8 +426,17 @@ def _find_models_for_endpoint(webmethod) -> tuple[type | None, type | None, lis
elif get_origin(return_annotation) is Annotated:
# Handle Annotated return types
args = get_args(return_annotation)
-if args and hasattr(args[0], "model_json_schema"):
+if args:
# Check if the first argument is a Pydantic model
if hasattr(args[0], "model_json_schema"):
response_model = args[0]
# Check if the first argument is a union type
elif get_origin(args[0]) is type(args[0]): # Union type
union_args = get_args(args[0])
for arg in union_args:
if hasattr(arg, "model_json_schema"):
response_model = arg
break
elif get_origin(return_annotation) is type(return_annotation): # Union type
# Handle union types - try to find the first Pydantic model
args = get_args(return_annotation)
@@ -340,6 +456,7 @@ def _make_type_safe_for_fastapi(type_hint) -> type:
"""
Make a type hint safe for FastAPI by converting problematic types to their base types.
This handles cases like Literal["24h"] that cause forward reference errors.
Also removes Union with None to avoid anyOf with type: 'null' schemas.
"""
# Handle Literal types that might cause issues
if hasattr(type_hint, "__origin__") and type_hint.__origin__ is Literal:
@@ -369,11 +486,16 @@ def _make_type_safe_for_fastapi(type_hint) -> type:
if origin is type(type_hint) or (hasattr(type_hint, "__args__") and type_hint.__args__):
# This is a union type, find the non-None type
args = get_args(type_hint)
-for arg in args:
-if arg is not type(None) and arg is not None:
-return arg
+non_none_types = [arg for arg in args if arg is not type(None) and arg is not None]
+if non_none_types:
+# Return the first non-None type to avoid anyOf with null
+return non_none_types[0]
+elif args:
# If all args are None, return the first one
-return args[0] if args else type_hint
+return args[0]
+else:
+return type_hint
# Not a union type, return as-is
return type_hint
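The union handling above effectively strips None from Optional annotations and keeps the first remaining member. The core of that check, as a standalone snippet:

# Standalone sketch of dropping None from an Optional/Union annotation.
from typing import Optional, Union, get_args, get_origin

hint = Optional[str]  # equivalent to Union[str, None]
if get_origin(hint) is Union:
    non_none = [a for a in get_args(hint) if a is not type(None)]
    hint = non_none[0] if non_none else hint
print(hint)  # <class 'str'>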
@@ -475,6 +597,202 @@ def _find_extra_body_params_for_route(api_name: str, route, webmethod) -> list[d
return []
def _ensure_json_schema_types_included(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Ensure all @json_schema_type decorated models are included in the OpenAPI schema.
This finds all models with the _llama_stack_schema_type attribute and adds them to the schema.
"""
if "components" not in openapi_schema:
openapi_schema["components"] = {}
if "schemas" not in openapi_schema["components"]:
openapi_schema["components"]["schemas"] = {}
# Find all classes with the _llama_stack_schema_type attribute
from llama_stack import apis
# Get all modules in the apis package
apis_modules = []
for module_name in dir(apis):
if not module_name.startswith("_"):
try:
module = getattr(apis, module_name)
if hasattr(module, "__file__"):
apis_modules.append(module)
except (ImportError, AttributeError):
continue
# Also check submodules
for module in apis_modules:
for attr_name in dir(module):
if not attr_name.startswith("_"):
try:
attr = getattr(module, attr_name)
if hasattr(attr, "__file__") and hasattr(attr, "__name__"):
apis_modules.append(attr)
except (ImportError, AttributeError):
continue
# Find all classes with the _llama_stack_schema_type attribute
for module in apis_modules:
for attr_name in dir(module):
try:
attr = getattr(module, attr_name)
if (
hasattr(attr, "_llama_stack_schema_type")
and hasattr(attr, "model_json_schema")
and hasattr(attr, "__name__")
):
schema_name = attr.__name__
if schema_name not in openapi_schema["components"]["schemas"]:
try:
schema = attr.model_json_schema()
openapi_schema["components"]["schemas"][schema_name] = schema
except Exception:
# Skip if we can't generate the schema
continue
except (AttributeError, TypeError):
continue
# Also include any dynamic models that were created during endpoint generation
# This is a workaround to ensure dynamic models appear in the schema
global _dynamic_models
if "_dynamic_models" in globals():
for model in _dynamic_models:
try:
schema_name = model.__name__
if schema_name not in openapi_schema["components"]["schemas"]:
schema = model.model_json_schema()
openapi_schema["components"]["schemas"][schema_name] = schema
except Exception:
# Skip if we can't generate the schema
continue
return openapi_schema
def _fix_ref_references(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Fix $ref references to point to components/schemas instead of $defs.
This prevents the YAML dumper from creating a root-level $defs section.
"""
def fix_refs(obj: Any) -> None:
if isinstance(obj, dict):
if "$ref" in obj and obj["$ref"].startswith("#/$defs/"):
# Replace #/$defs/ with #/components/schemas/
obj["$ref"] = obj["$ref"].replace("#/$defs/", "#/components/schemas/")
for value in obj.values():
fix_refs(value)
elif isinstance(obj, list):
for item in obj:
fix_refs(item)
fix_refs(openapi_schema)
return openapi_schema
def _fix_anyof_with_null(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Fix anyOf schemas that contain type: 'null' by removing the null type
and making the field optional through the required field instead.
"""
def fix_anyof(obj: Any) -> None:
if isinstance(obj, dict):
if "anyOf" in obj and isinstance(obj["anyOf"], list):
# Check if anyOf contains type: 'null'
has_null = any(item.get("type") == "null" for item in obj["anyOf"] if isinstance(item, dict))
if has_null:
# Remove null types and keep only the non-null types
non_null_types = [
item for item in obj["anyOf"] if not (isinstance(item, dict) and item.get("type") == "null")
]
if len(non_null_types) == 1:
# If only one non-null type remains, replace anyOf with that type
obj.update(non_null_types[0])
if "anyOf" in obj:
del obj["anyOf"]
else:
# Keep the anyOf but without null types
obj["anyOf"] = non_null_types
# Recursively process all values
for value in obj.values():
fix_anyof(value)
elif isinstance(obj, list):
for item in obj:
fix_anyof(item)
fix_anyof(openapi_schema)
return openapi_schema
def _eliminate_defs_section(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Eliminate $defs section entirely by moving all definitions to components/schemas.
This matches the structure of the old pyopenapi generator for oasdiff compatibility.
"""
if "components" not in openapi_schema:
openapi_schema["components"] = {}
if "schemas" not in openapi_schema["components"]:
openapi_schema["components"]["schemas"] = {}
# First pass: collect all $defs from anywhere in the schema
defs_to_move = {}
def collect_defs(obj: Any) -> None:
if isinstance(obj, dict):
if "$defs" in obj:
# Collect $defs for later processing
for def_name, def_schema in obj["$defs"].items():
if def_name not in defs_to_move:
defs_to_move[def_name] = def_schema
# Recursively process all values
for value in obj.values():
collect_defs(value)
elif isinstance(obj, list):
for item in obj:
collect_defs(item)
# Collect all $defs
collect_defs(openapi_schema)
# Move all $defs to components/schemas
for def_name, def_schema in defs_to_move.items():
if def_name not in openapi_schema["components"]["schemas"]:
openapi_schema["components"]["schemas"][def_name] = def_schema
# Also move any existing root-level $defs to components/schemas
if "$defs" in openapi_schema:
print(f"Found root-level $defs with {len(openapi_schema['$defs'])} items, moving to components/schemas")
for def_name, def_schema in openapi_schema["$defs"].items():
if def_name not in openapi_schema["components"]["schemas"]:
openapi_schema["components"]["schemas"][def_name] = def_schema
# Remove the root-level $defs
del openapi_schema["$defs"]
# Second pass: remove all $defs sections from anywhere in the schema
def remove_defs(obj: Any) -> None:
if isinstance(obj, dict):
if "$defs" in obj:
del obj["$defs"]
# Recursively process all values
for value in obj.values():
remove_defs(value)
elif isinstance(obj, list):
for item in obj:
remove_defs(item)
# Remove all $defs sections
remove_defs(openapi_schema)
return openapi_schema
def _add_error_responses(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Add standard error response definitions to the OpenAPI schema.
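The $defs handling added above is a recursive JSON-pointer rewrite; its effect on a toy fragment looks like the following (sample data, not taken from the generated spec):

# Toy illustration of rewriting "#/$defs/..." references to "#/components/schemas/..." (sample data).
fragment = {"properties": {"item": {"$ref": "#/$defs/Item"}}}


def fix_refs(obj):
    if isinstance(obj, dict):
        ref = obj.get("$ref")
        if isinstance(ref, str) and ref.startswith("#/$defs/"):
            obj["$ref"] = ref.replace("#/$defs/", "#/components/schemas/")
        for value in obj.values():
            fix_refs(value)
    elif isinstance(obj, list):
        for item in obj:
            fix_refs(item)


fix_refs(fragment)
print(fragment)  # {'properties': {'item': {'$ref': '#/components/schemas/Item'}}}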
@@ -547,10 +865,40 @@ def _add_error_responses(openapi_schema: dict[str, Any]) -> dict[str, Any]:
return openapi_schema
def _fix_path_parameters(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Fix path parameter resolution issues by adding explicit parameter definitions.
"""
if "paths" not in openapi_schema:
return openapi_schema
for path, path_item in openapi_schema["paths"].items():
# Extract path parameters from the URL
path_params = _extract_path_parameters(path)
if not path_params:
continue
# Add parameters to each operation in this path
for method in ["get", "post", "put", "delete", "patch", "head", "options"]:
if method in path_item and isinstance(path_item[method], dict):
operation = path_item[method]
if "parameters" not in operation:
operation["parameters"] = []
# Add path parameters that aren't already defined
existing_param_names = {p.get("name") for p in operation["parameters"] if p.get("in") == "path"}
for param in path_params:
if param["name"] not in existing_param_names:
operation["parameters"].append(param)
return openapi_schema
def _fix_schema_issues(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Fix common schema issues that cause OpenAPI validation problems.
-This includes converting exclusiveMinimum numbers to minimum values.
+This includes converting exclusiveMinimum numbers to minimum values and fixing string fields with null defaults.
"""
if "components" not in openapi_schema or "schemas" not in openapi_schema["components"]:
return openapi_schema
@@ -560,10 +908,64 @@ def _fix_schema_issues(openapi_schema: dict[str, Any]) -> dict[str, Any]:
# Fix exclusiveMinimum issues
for _, schema_def in schemas.items():
_fix_exclusive_minimum_in_schema(schema_def)
_fix_all_null_defaults(schema_def)
return openapi_schema
def validate_openapi_schema(schema: dict[str, Any], schema_name: str = "OpenAPI schema") -> bool:
"""
Validate an OpenAPI schema using openapi-spec-validator.
Args:
schema: The OpenAPI schema dictionary to validate
schema_name: Name of the schema for error reporting
Returns:
True if valid, False otherwise
"""
try:
validate_spec(schema)
print(f"{schema_name} is valid")
return True
except OpenAPISpecValidatorError as e:
print(f"{schema_name} validation failed:")
print(f" {e}")
return False
except Exception as e:
print(f"{schema_name} validation error: {e}")
return False
def validate_schema_file(file_path: Path) -> bool:
"""
Validate an OpenAPI schema file (YAML or JSON).
Args:
file_path: Path to the schema file
Returns:
True if valid, False otherwise
"""
try:
with open(file_path) as f:
if file_path.suffix.lower() in [".yaml", ".yml"]:
schema = yaml.safe_load(f)
elif file_path.suffix.lower() == ".json":
schema = json.load(f)
else:
print(f"❌ Unsupported file format: {file_path.suffix}")
return False
return validate_openapi_schema(schema, str(file_path))
except Exception as e:
print(f"❌ Failed to read {file_path}: {e}")
return False
def _fix_exclusive_minimum_in_schema(obj: Any) -> None:
"""
Recursively fix exclusiveMinimum issues in a schema object.
@@ -586,6 +988,75 @@ def _fix_exclusive_minimum_in_schema(obj: Any) -> None:
_fix_exclusive_minimum_in_schema(item)
def _fix_string_fields_with_null_defaults(obj: Any) -> None:
"""
Recursively fix string fields that have default: null.
This violates OpenAPI spec - string fields should either have a string default or be optional.
"""
if isinstance(obj, dict):
# Check if this is a field definition with type: string and default: null
if obj.get("type") == "string" and "default" in obj and obj["default"] is None:
# Remove the default: null to make the field optional
del obj["default"]
# Add nullable: true to indicate the field can be null
obj["nullable"] = True
# Recursively process all values
for value in obj.values():
_fix_string_fields_with_null_defaults(value)
elif isinstance(obj, list):
# Recursively process all items
for item in obj:
_fix_string_fields_with_null_defaults(item)
def _fix_anyof_with_null_defaults(obj: Any) -> None:
"""
Recursively fix anyOf schemas that have default: null.
This violates OpenAPI spec - anyOf fields should not have null defaults.
"""
if isinstance(obj, dict):
# Check if this is a field definition with anyOf and default: null
if "anyOf" in obj and "default" in obj and obj["default"] is None:
# Remove the default: null to make the field optional
del obj["default"]
# Add nullable: true to indicate the field can be null
obj["nullable"] = True
# Recursively process all values
for value in obj.values():
_fix_anyof_with_null_defaults(value)
elif isinstance(obj, list):
# Recursively process all items
for item in obj:
_fix_anyof_with_null_defaults(item)
def _fix_all_null_defaults(obj: Any) -> None:
"""
Recursively fix all field types that have default: null.
This violates OpenAPI spec - fields should not have null defaults.
"""
if isinstance(obj, dict):
# Check if this is a field definition with default: null
if "default" in obj and obj["default"] is None:
# Remove the default: null to make the field optional
del obj["default"]
# Add nullable: true to indicate the field can be null
obj["nullable"] = True
# Recursively process all values
for value in obj.values():
_fix_all_null_defaults(value)
elif isinstance(obj, list):
# Recursively process all items
for item in obj:
_fix_all_null_defaults(item)
def _sort_paths_alphabetically(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Sort the paths in the OpenAPI schema by version prefix first, then alphabetically.
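The null-default fixes above amount to a small before/after transformation on each property, for example (sample fragment):

# Before/after for a property carrying an OpenAPI-problematic null default (sample data).
prop = {"type": "string", "default": None}
if "default" in prop and prop["default"] is None:
    del prop["default"]
    prop["nullable"] = True
print(prop)  # {'type': 'string', 'nullable': True}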
@@ -703,6 +1174,15 @@ def _filter_schema_by_version(
filtered_schema["components"]["schemas"] = filtered_schemas
# Preserve $defs section if it exists
if "components" in openapi_schema and "$defs" in openapi_schema["components"]:
if "components" not in filtered_schema:
filtered_schema["components"] = {}
filtered_schema["components"]["$defs"] = openapi_schema["components"]["$defs"]
print(f"Preserved $defs section with {len(openapi_schema['components']['$defs'])} items")
else:
print("No $defs section to preserve")
return filtered_schema
@@ -811,6 +1291,49 @@ def _filter_deprecated_schema(openapi_schema: dict[str, Any]) -> dict[str, Any]:
return filtered_schema
def _filter_combined_schema(openapi_schema: dict[str, Any]) -> dict[str, Any]:
"""
Filter OpenAPI schema to include both stable (v1) and experimental (v1alpha, v1beta) APIs.
Excludes deprecated endpoints. This is used for the combined "stainless" spec.
"""
filtered_schema = openapi_schema.copy()
if "paths" not in filtered_schema:
return filtered_schema
# Filter paths to include stable (v1) and experimental (v1alpha, v1beta), excluding deprecated
filtered_paths = {}
for path, path_item in filtered_schema["paths"].items():
# Check if path has any deprecated operations
is_deprecated = _is_path_deprecated(path_item)
# Skip deprecated endpoints
if is_deprecated:
continue
# Include /v1/ paths (stable)
if path.startswith("/v1/") and not path.startswith("/v1alpha/") and not path.startswith("/v1beta/"):
filtered_paths[path] = path_item
# Include /v1alpha/ and /v1beta/ paths (experimental)
elif path.startswith("/v1alpha/") or path.startswith("/v1beta/"):
filtered_paths[path] = path_item
filtered_schema["paths"] = filtered_paths
# Filter schemas/components to only include ones referenced by filtered paths
if "components" in filtered_schema and "schemas" in filtered_schema["components"]:
referenced_schemas = _find_schemas_referenced_by_paths(filtered_paths, openapi_schema)
filtered_schemas = {}
for schema_name, schema_def in filtered_schema["components"]["schemas"].items():
if schema_name in referenced_schemas:
filtered_schemas[schema_name] = schema_def
filtered_schema["components"]["schemas"] = filtered_schemas
return filtered_schema
def generate_openapi_spec(output_dir: str, format: str = "yaml", include_examples: bool = True) -> dict[str, Any]:
"""
Generate OpenAPI specification using FastAPI's built-in method.
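The combined (stainless) filter above selects paths purely by version prefix; its predicate reduces to roughly the following sketch (example paths only):

# Sketch of the version-prefix selection used for the combined (stainless) spec.
def include_in_combined(path: str) -> bool:
    experimental = path.startswith(("/v1alpha/", "/v1beta/"))
    stable = path.startswith("/v1/") and not experimental
    return stable or experimental


print(include_in_combined("/v1/models"))       # True  (stable)
print(include_in_combined("/v1alpha/agents"))  # True  (experimental)
print(include_in_combined("/v2/anything"))     # False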
@@ -835,12 +1358,63 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
servers=app.servers,
)
# Debug: Check if there's a root-level $defs in the original schema
if "$defs" in openapi_schema:
print(f"Original schema has root-level $defs with {len(openapi_schema['$defs'])} items")
else:
print("Original schema has no root-level $defs")
# Add Llama Stack specific extensions
openapi_schema = _add_llama_stack_extensions(openapi_schema, app)
# Add standard error responses
openapi_schema = _add_error_responses(openapi_schema)
# Ensure all @json_schema_type decorated models are included
openapi_schema = _ensure_json_schema_types_included(openapi_schema)
# Fix $ref references to point to components/schemas instead of $defs
openapi_schema = _fix_ref_references(openapi_schema)
# Debug: Check if there are any $ref references to $defs in the schema
defs_refs = []
def find_defs_refs(obj: Any, path: str = "") -> None:
if isinstance(obj, dict):
if "$ref" in obj and obj["$ref"].startswith("#/$defs/"):
defs_refs.append(f"{path}: {obj['$ref']}")
for key, value in obj.items():
find_defs_refs(value, f"{path}.{key}" if path else key)
elif isinstance(obj, list):
for i, item in enumerate(obj):
find_defs_refs(item, f"{path}[{i}]")
find_defs_refs(openapi_schema)
if defs_refs:
print(f"Found {len(defs_refs)} $ref references to $defs in schema")
for ref in defs_refs[:5]: # Show first 5
print(f" {ref}")
else:
print("No $ref references to $defs found in schema")
# Note: Let Pydantic/FastAPI generate the correct, standards-compliant schema
# Fields with default values should be optional according to OpenAPI standards
# Fix anyOf schemas with type: 'null' to avoid oasdiff errors
openapi_schema = _fix_anyof_with_null(openapi_schema)
# Fix path parameter resolution issues
openapi_schema = _fix_path_parameters(openapi_schema)
# Eliminate $defs section entirely for oasdiff compatibility
openapi_schema = _eliminate_defs_section(openapi_schema)
# Debug: Check if there's a root-level $defs after flattening
if "$defs" in openapi_schema:
print(f"After flattening: root-level $defs with {len(openapi_schema['$defs'])} items")
else:
print("After flattening: no root-level $defs")
# Ensure all referenced schemas are included
# DISABLED: This was using hardcoded schema generation. FastAPI should handle this automatically.
# openapi_schema = _ensure_referenced_schemas(openapi_schema)
@@ -853,7 +1427,7 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
# DISABLED: This was a hardcoded workaround. Using Pydantic's TypeAdapter instead.
# _fix_malformed_schemas(openapi_schema)
-# Split into stable (v1 only), experimental (v1alpha + v1beta), and deprecated specs
+# Split into stable (v1 only), experimental (v1alpha + v1beta), deprecated, and combined (stainless) specs
# Each spec needs its own deep copy of the full schema to avoid cross-contamination
import copy
@@ -862,6 +1436,16 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
copy.deepcopy(openapi_schema), stable_only=False, exclude_deprecated=True
)
deprecated_schema = _filter_deprecated_schema(copy.deepcopy(openapi_schema))
combined_schema = _filter_combined_schema(copy.deepcopy(openapi_schema))
# Update title and description for combined schema
if "info" in combined_schema:
combined_schema["info"]["title"] = "Llama Stack API - Stable & Experimental APIs"
combined_schema["info"]["description"] = (
combined_schema["info"].get("description", "")
+ "\n\n**🔗 COMBINED**: This specification includes both stable production-ready APIs and experimental pre-release APIs. "
"Use stable APIs for production deployments and experimental APIs for testing new features."
)
# Sort paths alphabetically for stable (v1 only)
stable_schema = _sort_paths_alphabetically(stable_schema)
@@ -869,11 +1453,24 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
experimental_schema = _sort_paths_alphabetically(experimental_schema)
# Sort paths by version prefix for deprecated
deprecated_schema = _sort_paths_alphabetically(deprecated_schema)
# Sort paths by version prefix for combined (stainless)
combined_schema = _sort_paths_alphabetically(combined_schema)
# Fix schema issues (like exclusiveMinimum -> minimum) for each spec
stable_schema = _fix_schema_issues(stable_schema)
experimental_schema = _fix_schema_issues(experimental_schema)
deprecated_schema = _fix_schema_issues(deprecated_schema)
combined_schema = _fix_schema_issues(combined_schema)
# Validate the schemas
print("\n🔍 Validating generated schemas...")
stable_valid = validate_openapi_schema(stable_schema, "Stable schema")
experimental_valid = validate_openapi_schema(experimental_schema, "Experimental schema")
deprecated_valid = validate_openapi_schema(deprecated_schema, "Deprecated schema")
combined_valid = validate_openapi_schema(combined_schema, "Combined (stainless) schema")
if not all([stable_valid, experimental_valid, deprecated_valid, combined_valid]):
print("⚠️ Some schemas failed validation, but continuing with generation...")
# Add any custom modifications here if needed
if include_examples:
@@ -887,8 +1484,60 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
# Save the stable specification
if format in ["yaml", "both"]:
yaml_path = output_path / "llama-stack-spec.yaml"
# Use ruamel.yaml for better control over YAML serialization
try:
from ruamel.yaml import YAML
yaml_writer = YAML()
yaml_writer.default_flow_style = False
yaml_writer.sort_keys = False
yaml_writer.width = 4096 # Prevent line wrapping
yaml_writer.allow_unicode = True
with open(yaml_path, "w") as f:
yaml_writer.dump(stable_schema, f)
except ImportError:
# Fallback to standard yaml if ruamel.yaml is not available
with open(yaml_path, "w") as f:
yaml.dump(stable_schema, f, default_flow_style=False, sort_keys=False)
# Post-process the YAML file to remove $defs section and fix references
with open(yaml_path) as f:
yaml_content = f.read()
if " $defs:" in yaml_content or "#/$defs/" in yaml_content:
print("Post-processing YAML to remove $defs section")
# Use string replacement to fix references directly
if "#/$defs/" in yaml_content:
refs_fixed = yaml_content.count("#/$defs/")
yaml_content = yaml_content.replace("#/$defs/", "#/components/schemas/")
print(f"Fixed {refs_fixed} $ref references using string replacement")
# Parse the YAML content
yaml_data = yaml.safe_load(yaml_content)
# Move $defs to components/schemas if it exists
if "$defs" in yaml_data:
print(f"Found $defs section with {len(yaml_data['$defs'])} items")
if "components" not in yaml_data:
yaml_data["components"] = {}
if "schemas" not in yaml_data["components"]:
yaml_data["components"]["schemas"] = {}
# Move all $defs to components/schemas
for def_name, def_schema in yaml_data["$defs"].items():
yaml_data["components"]["schemas"][def_name] = def_schema
# Remove the $defs section
del yaml_data["$defs"]
print("Moved $defs to components/schemas")
# Write the modified YAML back
with open(yaml_path, "w") as f:
yaml.dump(yaml_data, f, default_flow_style=False, sort_keys=False)
print("Updated YAML file")
print(f"✅ Generated YAML (stable): {yaml_path}") print(f"✅ Generated YAML (stable): {yaml_path}")
experimental_yaml_path = output_path / "experimental-llama-stack-spec.yaml" experimental_yaml_path = output_path / "experimental-llama-stack-spec.yaml"
@ -901,6 +1550,25 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
yaml.dump(deprecated_schema, f, default_flow_style=False, sort_keys=False) yaml.dump(deprecated_schema, f, default_flow_style=False, sort_keys=False)
print(f"✅ Generated YAML (deprecated): {deprecated_yaml_path}") print(f"✅ Generated YAML (deprecated): {deprecated_yaml_path}")
# Generate combined (stainless) spec
stainless_yaml_path = output_path / "stainless-llama-stack-spec.yaml"
try:
from ruamel.yaml import YAML
yaml_writer = YAML()
yaml_writer.default_flow_style = False
yaml_writer.sort_keys = False
yaml_writer.width = 4096 # Prevent line wrapping
yaml_writer.allow_unicode = True
with open(stainless_yaml_path, "w") as f:
yaml_writer.dump(combined_schema, f)
except ImportError:
# Fallback to standard yaml if ruamel.yaml is not available
with open(stainless_yaml_path, "w") as f:
yaml.dump(combined_schema, f, default_flow_style=False, sort_keys=False)
print(f"✅ Generated YAML (stainless/combined): {stainless_yaml_path}")
if format in ["json", "both"]: if format in ["json", "both"]:
json_path = output_path / "llama-stack-spec.json" json_path = output_path / "llama-stack-spec.json"
with open(json_path, "w") as f: with open(json_path, "w") as f:
@ -917,6 +1585,11 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
json.dump(deprecated_schema, f, indent=2) json.dump(deprecated_schema, f, indent=2)
print(f"✅ Generated JSON (deprecated): {deprecated_json_path}") print(f"✅ Generated JSON (deprecated): {deprecated_json_path}")
stainless_json_path = output_path / "stainless-llama-stack-spec.json"
with open(stainless_json_path, "w") as f:
json.dump(combined_schema, f, indent=2)
print(f"✅ Generated JSON (stainless/combined): {stainless_json_path}")
# Generate HTML documentation
html_path = output_path / "llama-stack-spec.html"
generate_html_docs(stable_schema, html_path)
@@ -930,6 +1603,10 @@ def generate_openapi_spec(output_dir: str, format: str = "yaml", include_example
generate_html_docs(deprecated_schema, deprecated_html_path, spec_file="deprecated-llama-stack-spec.yaml")
print(f"✅ Generated HTML (deprecated): {deprecated_html_path}")
stainless_html_path = output_path / "stainless-llama-stack-spec.html"
generate_html_docs(combined_schema, stainless_html_path, spec_file="stainless-llama-stack-spec.yaml")
print(f"✅ Generated HTML (stainless/combined): {stainless_html_path}")
return stable_schema
@@ -968,9 +1645,55 @@ def main():
parser.add_argument("output_dir", help="Output directory for generated files")
parser.add_argument("--format", choices=["yaml", "json", "both"], default="yaml", help="Output format")
parser.add_argument("--no-examples", action="store_true", help="Exclude examples from the specification")
parser.add_argument(
"--validate-only", action="store_true", help="Only validate existing schema files, don't generate new ones"
)
parser.add_argument("--validate-file", help="Validate a specific schema file")
args = parser.parse_args()
# Handle validation-only mode
if args.validate_only or args.validate_file:
if args.validate_file:
# Validate a specific file
file_path = Path(args.validate_file)
if not file_path.exists():
print(f"❌ File not found: {file_path}")
return 1
print(f"🔍 Validating {file_path}...")
is_valid = validate_schema_file(file_path)
return 0 if is_valid else 1
else:
# Validate all schema files in output directory
output_path = Path(args.output_dir)
if not output_path.exists():
print(f"❌ Output directory not found: {output_path}")
return 1
print(f"🔍 Validating all schema files in {output_path}...")
schema_files = (
list(output_path.glob("*.yaml")) + list(output_path.glob("*.yml")) + list(output_path.glob("*.json"))
)
if not schema_files:
print("❌ No schema files found to validate")
return 1
all_valid = True
for schema_file in schema_files:
print(f"\n📄 Validating {schema_file.name}...")
is_valid = validate_schema_file(schema_file)
if not is_valid:
all_valid = False
if all_valid:
print("\n✅ All schema files are valid!")
return 0
else:
print("\n❌ Some schema files failed validation")
return 1
print("🚀 Generating OpenAPI specification using FastAPI...") print("🚀 Generating OpenAPI specification using FastAPI...")
print(f"📁 Output directory: {args.output_dir}") print(f"📁 Output directory: {args.output_dir}")
print(f"📄 Format: {args.format}") print(f"📄 Format: {args.format}")


@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
PYTHONPATH=${PYTHONPATH:-}
THIS_DIR="$(cd "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" && pwd)"
set -euo pipefail
stack_dir=$(dirname "$THIS_DIR")
PYTHONPATH=$PYTHONPATH:$stack_dir \
python3 -m scripts.fastapi_generator "$stack_dir"/docs/static
cp "$stack_dir"/docs/static/stainless-llama-stack-spec.yaml "$stack_dir"/client-sdks/stainless/openapi.yml

scripts/validate_openapi.py Executable file

@@ -0,0 +1,290 @@
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
"""
OpenAPI Schema Validator for Llama Stack.
This script provides comprehensive validation of OpenAPI specifications
using multiple validation tools and approaches.
"""
import argparse
import json
import sys
from pathlib import Path
from typing import Any
import yaml
from openapi_spec_validator import validate_spec
from openapi_spec_validator.exceptions import OpenAPISpecValidatorError
def validate_openapi_schema(schema: dict[str, Any], schema_name: str = "OpenAPI schema") -> bool:
"""
Validate an OpenAPI schema using openapi-spec-validator.
Args:
schema: The OpenAPI schema dictionary to validate
schema_name: Name of the schema for error reporting
Returns:
True if valid, False otherwise
"""
try:
validate_spec(schema)
print(f"{schema_name} is valid")
return True
except OpenAPISpecValidatorError as e:
print(f"{schema_name} validation failed:")
print(f" {e}")
return False
except Exception as e:
print(f"{schema_name} validation error: {e}")
return False
def validate_schema_file(file_path: Path) -> bool:
"""
Validate an OpenAPI schema file (YAML or JSON).
Args:
file_path: Path to the schema file
Returns:
True if valid, False otherwise
"""
try:
with open(file_path) as f:
if file_path.suffix.lower() in [".yaml", ".yml"]:
schema = yaml.safe_load(f)
elif file_path.suffix.lower() == ".json":
schema = json.load(f)
else:
print(f"❌ Unsupported file format: {file_path.suffix}")
return False
return validate_openapi_schema(schema, str(file_path))
except Exception as e:
print(f"❌ Failed to read {file_path}: {e}")
return False
def validate_directory(directory: Path, pattern: str = "*.yaml") -> bool:
"""
Validate all OpenAPI schema files in a directory.
Args:
directory: Directory containing schema files
pattern: Glob pattern to match schema files
Returns:
True if all files are valid, False otherwise
"""
if not directory.exists():
print(f"❌ Directory not found: {directory}")
return False
schema_files = list(directory.glob(pattern)) + list(directory.glob("*.yml")) + list(directory.glob("*.json"))
if not schema_files:
print(f"❌ No schema files found in {directory}")
return False
print(f"🔍 Found {len(schema_files)} schema files to validate")
all_valid = True
for schema_file in schema_files:
print(f"\n📄 Validating {schema_file.name}...")
is_valid = validate_schema_file(schema_file)
if not is_valid:
all_valid = False
return all_valid
def get_schema_stats(schema: dict[str, Any]) -> dict[str, int]:
"""
Get statistics about an OpenAPI schema.
Args:
schema: The OpenAPI schema dictionary
Returns:
Dictionary with schema statistics
"""
stats = {
"paths": len(schema.get("paths", {})),
"schemas": len(schema.get("components", {}).get("schemas", {})),
"operations": 0,
"parameters": 0,
"responses": 0,
}
# Count operations
for path_info in schema.get("paths", {}).values():
for method in ["get", "post", "put", "delete", "patch", "head", "options"]:
if method in path_info:
stats["operations"] += 1
operation = path_info[method]
if "parameters" in operation:
stats["parameters"] += len(operation["parameters"])
if "responses" in operation:
stats["responses"] += len(operation["responses"])
return stats
def print_schema_stats(schema: dict[str, Any], schema_name: str = "Schema") -> None:
"""
Print statistics about an OpenAPI schema.
Args:
schema: The OpenAPI schema dictionary
schema_name: Name of the schema for display
"""
stats = get_schema_stats(schema)
print(f"\n📊 {schema_name} Statistics:")
print(f" 🛣️ Paths: {stats['paths']}")
print(f" 📋 Schemas: {stats['schemas']}")
print(f" 🔧 Operations: {stats['operations']}")
print(f" 📝 Parameters: {stats['parameters']}")
print(f" 📤 Responses: {stats['responses']}")
def main():
"""Main entry point for the OpenAPI validator."""
parser = argparse.ArgumentParser(
description="Validate OpenAPI specifications",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
# Validate a specific file
python validate_openapi.py docs/static/llama-stack-spec.yaml
# Validate all YAML files in a directory
python validate_openapi.py docs/static/
# Validate with detailed statistics
python validate_openapi.py docs/static/llama-stack-spec.yaml --stats
# Validate and show only errors
python validate_openapi.py docs/static/ --quiet
""",
)
parser.add_argument("path", help="Path to schema file or directory containing schema files")
parser.add_argument("--stats", action="store_true", help="Show detailed schema statistics")
parser.add_argument("--quiet", action="store_true", help="Only show errors, suppress success messages")
parser.add_argument("--pattern", default="*.yaml", help="Glob pattern for schema files (default: *.yaml)")
args = parser.parse_args()
path = Path(args.path)
if not path.exists():
print(f"❌ Path not found: {path}")
return 1
if path.is_file():
# Validate a single file
if args.quiet:
# Override the validation function to be quiet
def quiet_validate(schema, name):
try:
validate_spec(schema)
return True
except Exception as e:
print(f"{name}: {e}")
return False
try:
with open(path) as f:
if path.suffix.lower() in [".yaml", ".yml"]:
schema = yaml.safe_load(f)
elif path.suffix.lower() == ".json":
schema = json.load(f)
else:
print(f"❌ Unsupported file format: {path.suffix}")
return 1
is_valid = quiet_validate(schema, str(path))
if is_valid and args.stats:
print_schema_stats(schema, path.name)
return 0 if is_valid else 1
except Exception as e:
print(f"❌ Failed to read {path}: {e}")
return 1
else:
is_valid = validate_schema_file(path)
if is_valid and args.stats:
try:
with open(path) as f:
if path.suffix.lower() in [".yaml", ".yml"]:
schema = yaml.safe_load(f)
elif path.suffix.lower() == ".json":
schema = json.load(f)
else:
return 1
print_schema_stats(schema, path.name)
except Exception:
pass
return 0 if is_valid else 1
elif path.is_dir():
# Validate all files in directory
if args.quiet:
all_valid = True
schema_files = list(path.glob(args.pattern)) + list(path.glob("*.yml")) + list(path.glob("*.json"))
for schema_file in schema_files:
try:
with open(schema_file) as f:
if schema_file.suffix.lower() in [".yaml", ".yml"]:
schema = yaml.safe_load(f)
elif schema_file.suffix.lower() == ".json":
schema = json.load(f)
else:
continue
try:
validate_spec(schema)
except Exception as e:
print(f"{schema_file.name}: {e}")
all_valid = False
except Exception as e:
print(f"❌ Failed to read {schema_file.name}: {e}")
all_valid = False
return 0 if all_valid else 1
else:
all_valid = validate_directory(path, args.pattern)
if all_valid and args.stats:
# Show stats for all files
schema_files = list(path.glob(args.pattern)) + list(path.glob("*.yml")) + list(path.glob("*.json"))
for schema_file in schema_files:
try:
with open(schema_file) as f:
if schema_file.suffix.lower() in [".yaml", ".yml"]:
schema = yaml.safe_load(f)
elif schema_file.suffix.lower() == ".json":
schema = json.load(f)
else:
continue
print_schema_stats(schema, schema_file.name)
except Exception:
continue
return 0 if all_valid else 1
else:
print(f"❌ Invalid path type: {path}")
return 1
if __name__ == "__main__":
sys.exit(main())


@@ -1309,6 +1309,7 @@ OpenAIResponseInput = Annotated[
register_schema(OpenAIResponseInput, name="OpenAIResponseInput")
@json_schema_type
class ListOpenAIResponseInputItem(BaseModel):
"""List container for OpenAI response input items.


@@ -99,6 +99,7 @@ class ListToolGroupsResponse(BaseModel):
data: list[ToolGroup]
@json_schema_type
class ListToolDefsResponse(BaseModel):
"""Response containing a list of tool definitions.

uv.lock generated

@@ -1824,6 +1824,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fe/54/c86cd8e011fe98803d7e382fd67c0df5ceab8d2b7ad8c5a81524f791551c/jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716", size = 89184, upload-time = "2025-07-18T15:39:42.956Z" },
]
[[package]]
name = "jsonschema-path"
version = "0.3.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pathable" },
{ name = "pyyaml" },
{ name = "referencing" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6e/45/41ebc679c2a4fced6a722f624c18d658dee42612b83ea24c1caf7c0eb3a8/jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001", size = 11159, upload-time = "2025-01-24T14:33:16.547Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/58/3485da8cb93d2f393bce453adeef16896751f14ba3e2024bc21dc9597646/jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8", size = 14810, upload-time = "2025-01-24T14:33:14.652Z" },
]
[[package]]
name = "jsonschema-specifications"
version = "2025.4.1"
@@ -1903,6 +1918,38 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" },
]
[[package]]
name = "lazy-object-proxy"
version = "1.12.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/08/a2/69df9c6ba6d316cfd81fe2381e464db3e6de5db45f8c43c6a23504abf8cb/lazy_object_proxy-1.12.0.tar.gz", hash = "sha256:1f5a462d92fd0cfb82f1fab28b51bfb209fabbe6aabf7f0d51472c0c124c0c61", size = 43681, upload-time = "2025-08-22T13:50:06.783Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0d/1b/b5f5bd6bda26f1e15cd3232b223892e4498e34ec70a7f4f11c401ac969f1/lazy_object_proxy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ee0d6027b760a11cc18281e702c0309dd92da458a74b4c15025d7fc490deede", size = 26746, upload-time = "2025-08-22T13:42:37.572Z" },
{ url = "https://files.pythonhosted.org/packages/55/64/314889b618075c2bfc19293ffa9153ce880ac6153aacfd0a52fcabf21a66/lazy_object_proxy-1.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4ab2c584e3cc8be0dfca422e05ad30a9abe3555ce63e9ab7a559f62f8dbc6ff9", size = 71457, upload-time = "2025-08-22T13:42:38.743Z" },
{ url = "https://files.pythonhosted.org/packages/11/53/857fc2827fc1e13fbdfc0ba2629a7d2579645a06192d5461809540b78913/lazy_object_proxy-1.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:14e348185adbd03ec17d051e169ec45686dcd840a3779c9d4c10aabe2ca6e1c0", size = 71036, upload-time = "2025-08-22T13:42:40.184Z" },
{ url = "https://files.pythonhosted.org/packages/2b/24/e581ffed864cd33c1b445b5763d617448ebb880f48675fc9de0471a95cbc/lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4fcbe74fb85df8ba7825fa05eddca764138da752904b378f0ae5ab33a36c308", size = 69329, upload-time = "2025-08-22T13:42:41.311Z" },
{ url = "https://files.pythonhosted.org/packages/78/be/15f8f5a0b0b2e668e756a152257d26370132c97f2f1943329b08f057eff0/lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:563d2ec8e4d4b68ee7848c5ab4d6057a6d703cb7963b342968bb8758dda33a23", size = 70690, upload-time = "2025-08-22T13:42:42.51Z" },
{ url = "https://files.pythonhosted.org/packages/5d/aa/f02be9bbfb270e13ee608c2b28b8771f20a5f64356c6d9317b20043c6129/lazy_object_proxy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:53c7fd99eb156bbb82cbc5d5188891d8fdd805ba6c1e3b92b90092da2a837073", size = 26563, upload-time = "2025-08-22T13:42:43.685Z" },
{ url = "https://files.pythonhosted.org/packages/f4/26/b74c791008841f8ad896c7f293415136c66cc27e7c7577de4ee68040c110/lazy_object_proxy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:86fd61cb2ba249b9f436d789d1356deae69ad3231dc3c0f17293ac535162672e", size = 26745, upload-time = "2025-08-22T13:42:44.982Z" },
{ url = "https://files.pythonhosted.org/packages/9b/52/641870d309e5d1fb1ea7d462a818ca727e43bfa431d8c34b173eb090348c/lazy_object_proxy-1.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:81d1852fb30fab81696f93db1b1e55a5d1ff7940838191062f5f56987d5fcc3e", size = 71537, upload-time = "2025-08-22T13:42:46.141Z" },
{ url = "https://files.pythonhosted.org/packages/47/b6/919118e99d51c5e76e8bf5a27df406884921c0acf2c7b8a3b38d847ab3e9/lazy_object_proxy-1.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be9045646d83f6c2664c1330904b245ae2371b5c57a3195e4028aedc9f999655", size = 71141, upload-time = "2025-08-22T13:42:47.375Z" },
{ url = "https://files.pythonhosted.org/packages/e5/47/1d20e626567b41de085cf4d4fb3661a56c159feaa73c825917b3b4d4f806/lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:67f07ab742f1adfb3966c40f630baaa7902be4222a17941f3d85fd1dae5565ff", size = 69449, upload-time = "2025-08-22T13:42:48.49Z" },
{ url = "https://files.pythonhosted.org/packages/58/8d/25c20ff1a1a8426d9af2d0b6f29f6388005fc8cd10d6ee71f48bff86fdd0/lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75ba769017b944fcacbf6a80c18b2761a1795b03f8899acdad1f1c39db4409be", size = 70744, upload-time = "2025-08-22T13:42:49.608Z" },
{ url = "https://files.pythonhosted.org/packages/c0/67/8ec9abe15c4f8a4bcc6e65160a2c667240d025cbb6591b879bea55625263/lazy_object_proxy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:7b22c2bbfb155706b928ac4d74c1a63ac8552a55ba7fff4445155523ea4067e1", size = 26568, upload-time = "2025-08-22T13:42:57.719Z" },
{ url = "https://files.pythonhosted.org/packages/23/12/cd2235463f3469fd6c62d41d92b7f120e8134f76e52421413a0ad16d493e/lazy_object_proxy-1.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4a79b909aa16bde8ae606f06e6bbc9d3219d2e57fb3e0076e17879072b742c65", size = 27391, upload-time = "2025-08-22T13:42:50.62Z" },
{ url = "https://files.pythonhosted.org/packages/60/9e/f1c53e39bbebad2e8609c67d0830cc275f694d0ea23d78e8f6db526c12d3/lazy_object_proxy-1.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:338ab2f132276203e404951205fe80c3fd59429b3a724e7b662b2eb539bb1be9", size = 80552, upload-time = "2025-08-22T13:42:51.731Z" },
{ url = "https://files.pythonhosted.org/packages/4c/b6/6c513693448dcb317d9d8c91d91f47addc09553613379e504435b4cc8b3e/lazy_object_proxy-1.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c40b3c9faee2e32bfce0df4ae63f4e73529766893258eca78548bac801c8f66", size = 82857, upload-time = "2025-08-22T13:42:53.225Z" },
{ url = "https://files.pythonhosted.org/packages/12/1c/d9c4aaa4c75da11eb7c22c43d7c90a53b4fca0e27784a5ab207768debea7/lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:717484c309df78cedf48396e420fa57fc8a2b1f06ea889df7248fdd156e58847", size = 80833, upload-time = "2025-08-22T13:42:54.391Z" },
{ url = "https://files.pythonhosted.org/packages/0b/ae/29117275aac7d7d78ae4f5a4787f36ff33262499d486ac0bf3e0b97889f6/lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a6b7ea5ea1ffe15059eb44bcbcb258f97bcb40e139b88152c40d07b1a1dfc9ac", size = 79516, upload-time = "2025-08-22T13:42:55.812Z" },
{ url = "https://files.pythonhosted.org/packages/19/40/b4e48b2c38c69392ae702ae7afa7b6551e0ca5d38263198b7c79de8b3bdf/lazy_object_proxy-1.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:08c465fb5cd23527512f9bd7b4c7ba6cec33e28aad36fbbe46bf7b858f9f3f7f", size = 27656, upload-time = "2025-08-22T13:42:56.793Z" },
{ url = "https://files.pythonhosted.org/packages/ef/3a/277857b51ae419a1574557c0b12e0d06bf327b758ba94cafc664cb1e2f66/lazy_object_proxy-1.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c9defba70ab943f1df98a656247966d7729da2fe9c2d5d85346464bf320820a3", size = 26582, upload-time = "2025-08-22T13:49:49.366Z" },
{ url = "https://files.pythonhosted.org/packages/1a/b6/c5e0fa43535bb9c87880e0ba037cdb1c50e01850b0831e80eb4f4762f270/lazy_object_proxy-1.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6763941dbf97eea6b90f5b06eb4da9418cc088fce0e3883f5816090f9afcde4a", size = 71059, upload-time = "2025-08-22T13:49:50.488Z" },
{ url = "https://files.pythonhosted.org/packages/06/8a/7dcad19c685963c652624702f1a968ff10220b16bfcc442257038216bf55/lazy_object_proxy-1.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fdc70d81235fc586b9e3d1aeef7d1553259b62ecaae9db2167a5d2550dcc391a", size = 71034, upload-time = "2025-08-22T13:49:54.224Z" },
{ url = "https://files.pythonhosted.org/packages/12/ac/34cbfb433a10e28c7fd830f91c5a348462ba748413cbb950c7f259e67aa7/lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0a83c6f7a6b2bfc11ef3ed67f8cbe99f8ff500b05655d8e7df9aab993a6abc95", size = 69529, upload-time = "2025-08-22T13:49:55.29Z" },
{ url = "https://files.pythonhosted.org/packages/6f/6a/11ad7e349307c3ca4c0175db7a77d60ce42a41c60bcb11800aabd6a8acb8/lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:256262384ebd2a77b023ad02fbcc9326282bcfd16484d5531154b02bc304f4c5", size = 70391, upload-time = "2025-08-22T13:49:56.35Z" },
{ url = "https://files.pythonhosted.org/packages/59/97/9b410ed8fbc6e79c1ee8b13f8777a80137d4bc189caf2c6202358e66192c/lazy_object_proxy-1.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7601ec171c7e8584f8ff3f4e440aa2eebf93e854f04639263875b8c2971f819f", size = 26988, upload-time = "2025-08-22T13:49:57.302Z" },
]
[[package]]
name = "linkify"
version = "1.4"
@@ -1985,6 +2032,7 @@ dev = [
{ name = "black" },
{ name = "mypy" },
{ name = "nbval" },
{ name = "openapi-spec-validator" },
{ name = "pre-commit" },
{ name = "pytest" },
{ name = "pytest-asyncio" },
@@ -2132,6 +2180,7 @@ dev = [
{ name = "black" },
{ name = "mypy" },
{ name = "nbval" },
{ name = "openapi-spec-validator", specifier = ">=0.7.2" },
{ name = "pre-commit" },
{ name = "pytest", specifier = ">=8.4" },
{ name = "pytest-asyncio", specifier = ">=1.0" },
@@ -2985,6 +3034,35 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/14/f3/ebbd700d8dc1e6380a7a382969d96bc0cbea8717b52fb38ff0ca2a7653e8/openai-2.5.0-py3-none-any.whl", hash = "sha256:21380e5f52a71666dbadbf322dd518bdf2b9d11ed0bb3f96bea17310302d6280", size = 999851, upload-time = "2025-10-17T18:14:45.528Z" },
]
[[package]]
name = "openapi-schema-validator"
version = "0.6.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jsonschema" },
{ name = "jsonschema-specifications" },
{ name = "rfc3339-validator" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8b/f3/5507ad3325169347cd8ced61c232ff3df70e2b250c49f0fe140edb4973c6/openapi_schema_validator-0.6.3.tar.gz", hash = "sha256:f37bace4fc2a5d96692f4f8b31dc0f8d7400fd04f3a937798eaf880d425de6ee", size = 11550, upload-time = "2025-01-10T18:08:22.268Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/21/c6/ad0fba32775ae749016829dace42ed80f4407b171da41313d1a3a5f102e4/openapi_schema_validator-0.6.3-py3-none-any.whl", hash = "sha256:f3b9870f4e556b5a62a1c39da72a6b4b16f3ad9c73dc80084b1b11e74ba148a3", size = 8755, upload-time = "2025-01-10T18:08:19.758Z" },
]
[[package]]
name = "openapi-spec-validator"
version = "0.7.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jsonschema" },
{ name = "jsonschema-path" },
{ name = "lazy-object-proxy" },
{ name = "openapi-schema-validator" },
]
sdist = { url = "https://files.pythonhosted.org/packages/82/af/fe2d7618d6eae6fb3a82766a44ed87cd8d6d82b4564ed1c7cfb0f6378e91/openapi_spec_validator-0.7.2.tar.gz", hash = "sha256:cc029309b5c5dbc7859df0372d55e9d1ff43e96d678b9ba087f7c56fc586f734", size = 36855, upload-time = "2025-06-07T14:48:56.299Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/27/dd/b3fd642260cb17532f66cc1e8250f3507d1e580483e209dc1e9d13bd980d/openapi_spec_validator-0.7.2-py3-none-any.whl", hash = "sha256:4bbdc0894ec85f1d1bea1d6d9c8b2c3c8d7ccaa13577ef40da9c006c9fd0eb60", size = 39713, upload-time = "2025-06-07T14:48:54.077Z" },
]
[[package]]
name = "opentelemetry-api"
version = "1.36.0"
@@ -3221,6 +3299,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650, upload-time = "2024-04-05T09:43:53.299Z" },
]
[[package]]
name = "pathable"
version = "0.4.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/67/93/8f2c2075b180c12c1e9f6a09d1a985bc2036906b13dff1d8917e395f2048/pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2", size = 8124, upload-time = "2025-01-10T18:43:13.247Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7d/eb/b6260b31b1a96386c0a880edebe26f89669098acea8e0318bff6adb378fd/pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2", size = 9592, upload-time = "2025-01-10T18:43:11.88Z" },
]
[[package]]
name = "pathspec"
version = "0.12.1"
@@ -4378,6 +4465,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1c/4c/cc276ce57e572c102d9542d383b2cfd551276581dc60004cb94fe8774c11/responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c", size = 34769, upload-time = "2025-08-08T19:01:45.018Z" },
]
[[package]]
name = "rfc3339-validator"
version = "0.1.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" },
]
[[package]]
name = "rich"
version = "14.1.0"