mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-03 09:53:45 +00:00
Generate the Stainless client config directly from code so the config can be validated before the YAML is ever written. This change enforces allowed HTTP verbs/paths, detects duplicate routes across resources, and ensures README example endpoints exist and match the OpenAPI spec. The generator now fails fast when config entries drift, keeping the published config current with the spec. More validation can be added later, but this is a solid start.
23 lines
670 B
Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

"""
|
|
OpenAPI generator module for Llama Stack.
|
|
|
|
This module provides functionality to generate OpenAPI specifications
|
|
from FastAPI applications.
|
|
"""
|
|
|
|
__all__ = ["generate_openapi_spec", "main"]
|
|
|
|
def __getattr__(name: str):
    """Lazily resolve the package's public API on first access (PEP 562).

    Importing ``.main`` is deferred until ``generate_openapi_spec`` or
    ``main`` is actually requested, so merely importing this package does
    not pull in the generator implementation.

    :param name: Attribute name being looked up on the module.
    :returns: The requested callable from ``.main``.
    :raises AttributeError: If *name* is not part of the public API.
    """
    if name in {"generate_openapi_spec", "main"}:
        from .main import generate_openapi_spec as _gos
        from .main import main as _main

        return {"generate_openapi_spec": _gos, "main": _main}[name]
    # Match CPython's standard message for module attribute misses so the
    # error is self-describing (the original bare ``AttributeError(name)``
    # printed only the attribute name with no context).
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")