Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-30 19:40:01 +00:00)
Stub in an initial OpenAI Responses API

Signed-off-by: Ben Browning <bbrownin@redhat.com>

parent c149cf2e0f
commit 70c088af3a

18 changed files with 441 additions and 0 deletions
@@ -39,6 +39,7 @@ def get_distribution_template() -> DistributionTemplate:
             "remote::model-context-protocol",
             "remote::wolfram-alpha",
         ],
+        "openai_responses": ["inline::openai-responses"],
     }
     name = "remote-vllm"
     inference_provider = Provider(
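For context, the added line maps the new `openai_responses` API to its inline provider inside the remote-vllm distribution template's provider registry, where each key names an API and each value lists provider IDs in `<source>::<name>` form. The sketch below is a minimal, hypothetical illustration of that mapping; only the `openai_responses` entry and the `remote::`/`inline::` ID convention come from this diff, and the lookup loop is an assumption added for clarity, not code from the commit.

```python
# Illustrative provider registry; only the "openai_responses" entry is taken
# from this commit's diff -- the other keys and lists are assumptions.
providers = {
    "tool_runtime": [
        "remote::model-context-protocol",
        "remote::wolfram-alpha",
    ],
    # New in this commit: route the stubbed Responses API to an inline provider.
    "openai_responses": ["inline::openai-responses"],
}

# Hypothetical lookup showing how a provider ID splits into its source and name.
api = "openai_responses"
for provider_id in providers[api]:
    source, _, impl = provider_id.partition("::")
    print(f"{api} -> {impl} ({source} provider)")
```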