[Feat] Expose Responses API on LiteLLM UI Test Key Page (#10166)

* add /responses API on UI

* add makeOpenAIResponsesRequest

* fix Responses API support on UI

* fix endpoint selector

* render Responses API chunks on the LiteLLM chat UI

* fixes to streaming iterator

* fix rendering of Responses API `completed` events

* fixes for MockResponsesAPIStreamingIterator

* add transform_responses_api_request_to_chat_completion_request

* fixes for Responses API

* add test_basic_openai_responses_api_streaming

* fix base Responses API tests
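
Taken together, these changes let the Test Key page exercise the `/v1/responses` endpoint with streaming. Below is a minimal sketch of the kind of request the page issues, written here with the openai Python SDK pointed at a LiteLLM proxy; the base URL, key, model, and event handling are illustrative assumptions, not code from this commit.

    # Sketch only: placeholder proxy address, virtual key, and model name.
    from openai import OpenAI

    client = OpenAI(
        base_url="http://localhost:4000",  # hypothetical LiteLLM proxy address
        api_key="sk-1234",                 # the virtual key being tested
    )

    # Stream a Responses API call; the UI renders each text delta as it arrives.
    stream = client.responses.create(
        model="gpt-4o",
        input="Hello from the Test Key page",
        stream=True,
    )
    for event in stream:
        if event.type == "response.output_text.delta":
            # Incremental output text.
            print(event.delta, end="", flush=True)
        elif event.type == "response.completed":
            # The final event carries usage (see the diff below).
            print("\nusage:", event.response.usage)
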
Ishaan Jaff · 2025-04-19 13:18:54 -07:00 · committed by GitHub
parent 03b5399f86 · commit 0717369ae6
8 changed files with 332 additions and 52 deletions


@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Optional, Union
 from openai.types.responses.tool_param import FunctionToolParam
 from litellm.caching import InMemoryCache
+from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.responses.litellm_completion_transformation.session_handler import (
     ResponsesAPISessionElement,
     SessionHandler,
@@ -88,6 +89,18 @@ class LiteLLMCompletionResponsesConfig:
             "custom_llm_provider": custom_llm_provider,
         }
+        # Responses API `Completed` events require usage, we pass `stream_options` to litellm.completion to include usage
+        if stream is True:
+            stream_options = {
+                "include_usage": True,
+            }
+            litellm_completion_request["stream_options"] = stream_options
+            litellm_logging_obj: Optional[LiteLLMLoggingObj] = kwargs.get(
+                "litellm_logging_obj"
+            )
+            if litellm_logging_obj:
+                litellm_logging_obj.stream_options = stream_options
+
         # only pass non-None values
         litellm_completion_request = {
             k: v for k, v in litellm_completion_request.items() if v is not None
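
The `stream_options` plumbing above exists because token usage only appears on a chat-completion stream when `include_usage` is requested, and the Responses API `completed` event needs those counts. A minimal sketch of that behavior at the `litellm.completion` level follows; the model name and the exact shape of the final chunk are assumptions, not asserted by this commit.

    # Sketch only: placeholder model; usage access guarded with getattr.
    import litellm

    chunks = litellm.completion(
        model="gpt-4o",
        messages=[{"role": "user", "content": "hi"}],
        stream=True,
        stream_options={"include_usage": True},
    )

    usage = None
    for chunk in chunks:
        # Intermediate chunks carry no usage; a trailing chunk
        # (typically with empty `choices`) carries the aggregate counts,
        # which the transformation layer folds into the `completed` event.
        if getattr(chunk, "usage", None) is not None:
            usage = chunk.usage

    print(usage)  # prompt/completion/total token counts, if the provider sent them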