OpenAI Responses - image support and multi-turn tool calling

Signed-off-by: Ben Browning <bbrownin@redhat.com>
Ben Browning 2025-04-18 09:13:48 -04:00 committed by Ashwin Bharambe
parent 35b2e2646f
commit d523c8692a
13 changed files with 186 additions and 34 deletions
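
Not part of the diff below, but for context: a minimal sketch of exercising the two features named in the commit title against a running llama-stack server. It assumes the server exposes an OpenAI-compatible Responses endpoint (the base URL, port, and model id here are placeholders) and uses the standard openai Python client.

# Illustrative only, not from this commit. Base URL, port, and model id are placeholders.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

# Turn 1: image input via a Responses-API "input_image" content part.
first = client.responses.create(
    model="meta-llama/Llama-3.2-11B-Vision-Instruct",
    input=[
        {
            "role": "user",
            "content": [
                {"type": "input_text", "text": "What is in this image?"},
                {"type": "input_image", "image_url": "https://example.com/dog.png"},
            ],
        }
    ],
)

# Turn 2: continue the same conversation by chaining to the previous response.
second = client.responses.create(
    model="meta-llama/Llama-3.2-11B-Vision-Instruct",
    previous_response_id=first.id,
    input="What breed might it be?",
)
print(second.output_text)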

View file

@@ -24,6 +24,8 @@ distribution_spec:
     - inline::braintrust
     telemetry:
     - inline::meta-reference
+    openai_responses:
+    - inline::openai-responses
     tool_runtime:
     - remote::brave-search
     - remote::tavily-search
@@ -31,6 +33,4 @@ distribution_spec:
     - inline::rag-runtime
     - remote::model-context-protocol
     - remote::wolfram-alpha
-    openai_responses:
-    - inline::openai-responses
 image_type: conda

View file

@@ -92,6 +92,14 @@ providers:
       service_name: "${env.OTEL_SERVICE_NAME:\u200B}"
       sinks: ${env.TELEMETRY_SINKS:console,sqlite}
       sqlite_db_path: ${env.SQLITE_DB_PATH:~/.llama/distributions/remote-vllm/trace_store.db}
+  openai_responses:
+  - provider_id: openai-responses
+    provider_type: inline::openai-responses
+    config:
+      kvstore:
+        type: sqlite
+        namespace: null
+        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/remote-vllm}/openai_responses.db
   tool_runtime:
   - provider_id: brave-search
     provider_type: remote::brave-search
@@ -116,14 +124,6 @@
     provider_type: remote::wolfram-alpha
     config:
       api_key: ${env.WOLFRAM_ALPHA_API_KEY:}
-  openai_responses:
-  - provider_id: openai-responses
-    provider_type: inline::openai-responses
-    config:
-      kvstore:
-        type: sqlite
-        namespace: null
-        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/remote-vllm}/openai_responses.db
 metadata_store:
   type: sqlite
   db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/remote-vllm}/registry.db
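
The ${env.VAR:default} references in the relocated openai_responses block resolve the same way as the existing telemetry and metadata_store entries. As a rough illustration (not llama-stack code), the kvstore db_path default expands roughly like this:

import os

# Rough equivalent of ${env.SQLITE_STORE_DIR:~/.llama/distributions/remote-vllm}
# followed by /openai_responses.db (illustrative only, not llama-stack code).
store_dir = os.environ.get(
    "SQLITE_STORE_DIR",
    os.path.expanduser("~/.llama/distributions/remote-vllm"),
)
db_path = os.path.join(store_dir, "openai_responses.db")
print(db_path)  # e.g. /home/user/.llama/distributions/remote-vllm/openai_responses.db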

View file

@@ -85,6 +85,14 @@ providers:
       service_name: "${env.OTEL_SERVICE_NAME:\u200B}"
       sinks: ${env.TELEMETRY_SINKS:console,sqlite}
       sqlite_db_path: ${env.SQLITE_DB_PATH:~/.llama/distributions/remote-vllm/trace_store.db}
+  openai_responses:
+  - provider_id: openai-responses
+    provider_type: inline::openai-responses
+    config:
+      kvstore:
+        type: sqlite
+        namespace: null
+        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/remote-vllm}/openai_responses.db
   tool_runtime:
   - provider_id: brave-search
     provider_type: remote::brave-search
@@ -109,14 +117,6 @@
     provider_type: remote::wolfram-alpha
     config:
      api_key: ${env.WOLFRAM_ALPHA_API_KEY:}
-  openai_responses:
-  - provider_id: openai-responses
-    provider_type: inline::openai-responses
-    config:
-      kvstore:
-        type: sqlite
-        namespace: null
-        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/remote-vllm}/openai_responses.db
 metadata_store:
   type: sqlite
   db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/remote-vllm}/registry.db

View file

@@ -31,6 +31,7 @@ def get_distribution_template() -> DistributionTemplate:
         "datasetio": ["remote::huggingface", "inline::localfs"],
         "scoring": ["inline::basic", "inline::llm-as-judge", "inline::braintrust"],
         "telemetry": ["inline::meta-reference"],
+        "openai_responses": ["inline::openai-responses"],
         "tool_runtime": [
             "remote::brave-search",
             "remote::tavily-search",
@@ -39,7 +40,6 @@
             "remote::model-context-protocol",
             "remote::wolfram-alpha",
         ],
-        "openai_responses": ["inline::openai-responses"],
     }
     name = "remote-vllm"
     inference_provider = Provider(
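
Also not part of the diff: a sketch of the multi-turn tool-calling flow the commit title mentions, following the upstream OpenAI Responses API shapes (function_call output items answered with function_call_output inputs on the next turn). The tool, its arguments, the base URL, and the model id are invented for illustration.

# Illustrative only: multi-turn tool calling against an OpenAI-compatible Responses endpoint.
import json
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

tools = [
    {
        "type": "function",
        "name": "get_weather",
        "description": "Look up current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    }
]

first = client.responses.create(
    model="meta-llama/Llama-3.1-8B-Instruct",
    tools=tools,
    input="What's the weather in Boston?",
)

# Collect any function calls the model emitted and answer each one.
tool_outputs = []
for item in first.output:
    if item.type == "function_call":
        args = json.loads(item.arguments)
        result = f"Sunny and 22C in {args['city']}"  # stand-in for a real tool
        tool_outputs.append(
            {"type": "function_call_output", "call_id": item.call_id, "output": result}
        )

# Second turn: feed the tool results back, chained to the first response.
second = client.responses.create(
    model="meta-llama/Llama-3.1-8B-Instruct",
    tools=tools,
    previous_response_id=first.id,
    input=tool_outputs,
)
print(second.output_text)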