llama-stack-mirror/llama_stack/providers/registry/openai_responses.py
Ben Browning 70c088af3a Stub in an initial OpenAI Responses API
Signed-off-by: Ben Browning <bbrownin@redhat.com>
2025-04-28 10:37:33 -07:00

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import List

from llama_stack.providers.datatypes import Api, InlineProviderSpec, ProviderSpec


def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.openai_responses,
            provider_type="inline::openai-responses",
            pip_packages=[],
            module="llama_stack.providers.inline.openai_responses",
            config_class="llama_stack.providers.inline.openai_responses.config.OpenAIResponsesImplConfig",
            api_dependencies=[
                Api.models,
                Api.inference,
            ],
        ),
    ]
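
The config_class string above points at a module that is not shown in this file. As a rough sketch only, assuming the common Llama Stack pattern of a Pydantic BaseModel config (the class body below is an assumption for an initial stub, not the actual source):

    from typing import Any, Dict

    from pydantic import BaseModel


    class OpenAIResponsesImplConfig(BaseModel):
        # Hypothetical sketch; the real class is defined in
        # llama_stack/providers/inline/openai_responses/config.py and may differ.
        # The stub provider declares no pip packages or settings, so an empty
        # config with a sample_run_config helper is a plausible starting point.

        @classmethod
        def sample_run_config(cls, **kwargs: Any) -> Dict[str, Any]:
            return {}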