mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-28 02:53:30 +00:00
Fix OpenAPI generation to have text/event-stream for streamable methods
This commit is contained in:
parent
acbecbf8b3
commit
bba6edd06b
4 changed files with 703 additions and 705 deletions
|
@ -34,20 +34,6 @@ schema_utils.json_schema_type = json_schema_type
|
|||
from llama_stack.distribution.stack import LlamaStack
|
||||
|
||||
|
||||
# TODO: this should be fixed in the generator itself so it reads appropriate annotations
#
# Paths whose successful POST responses are server-sent event streams rather
# than a single JSON document.  The generated OpenAPI document is patched
# after generation so these advertise ``text/event-stream``.
STREAMING_ENDPOINTS = [
    "/agents/turn/create",
    "/inference/chat_completion",
]
|
||||
|
||||
|
||||
def patch_sse_stream_responses(spec: Specification):
    """Re-key the 200-response media type for streaming endpoints.

    The generator emits every POST response body under ``application/json``;
    for the paths listed in ``STREAMING_ENDPOINTS`` the same schema is moved
    under ``text/event-stream`` so clients know to expect an SSE stream.

    :param spec: the generated OpenAPI specification, mutated in place.
    """
    for route, path_item in spec.document.paths.items():
        if route not in STREAMING_ENDPOINTS:
            continue
        ok_response = path_item.post.responses["200"]
        # Drop the JSON media-type entry and reinstall its schema under the
        # event-stream media type (raises KeyError if the entry is absent,
        # matching the generator's guaranteed output shape).
        ok_response.content["text/event-stream"] = ok_response.content.pop("application/json")
|
||||
|
||||
|
||||
def main(output_dir: str):
|
||||
output_dir = Path(output_dir)
|
||||
if not output_dir.exists():
|
||||
|
@ -74,8 +60,6 @@ def main(output_dir: str):
|
|||
),
|
||||
)
|
||||
|
||||
patch_sse_stream_responses(spec)
|
||||
|
||||
with open(output_dir / "llama-stack-spec.yaml", "w", encoding="utf-8") as fp:
|
||||
yaml.dump(spec.get_json(), fp, allow_unicode=True)
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue