Mirror of https://github.com/BerriAI/litellm.git
[Feat] Expose Responses API on LiteLLM UI Test Key Page (#10166)
* add /responses API on UI
* add makeOpenAIResponsesRequest
* add makeOpenAIResponsesRequest
* fix add responses API on UI
* fix endpoint selector
* responses API render chunks on litellm chat ui
* fixes to streaming iterator
* fix render responses completed events
* fixes for MockResponsesAPIStreamingIterator
* transform_responses_api_request_to_chat_completion_request
* fix for responses API
* test_basic_openai_responses_api_streaming
* fix base responses api tests
Parent: 518a4900f6
Commit: 17f55e9937
8 changed files with 332 additions and 52 deletions
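For context, this change points the UI's Test Key page at the same /responses path that the new test_basic_openai_responses_api_streaming test exercises. A minimal sketch of that call through the LiteLLM Python SDK (the model name is a placeholder, not taken from this commit):

    import litellm

    # Stream a Responses API call through LiteLLM; events arrive incrementally,
    # and the final `response.completed` event should carry usage once this
    # commit's `stream_options` wiring is in place.
    response = litellm.responses(
        model="openai/gpt-4o",  # placeholder model
        input="Say hello",
        stream=True,
    )
    for event in response:
        print(event)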
@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Optional, Union
 from openai.types.responses.tool_param import FunctionToolParam
 
 from litellm.caching import InMemoryCache
+from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.responses.litellm_completion_transformation.session_handler import (
     ResponsesAPISessionElement,
     SessionHandler,
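The hunks here touch LiteLLM's bridge from the Responses API to litellm.completion; the transform_responses_api_request_to_chat_completion_request step named in the commit message does roughly this kind of mapping. A simplified, hypothetical sketch (names and shapes are illustrative, not the actual implementation):

    from typing import Any, Dict, List, Union

    def responses_to_chat_request(
        model: str,
        input: Union[str, List[Dict[str, Any]]],
        stream: bool = False,
    ) -> Dict[str, Any]:
        # A string input becomes a single user message; list inputs already
        # carry role/content items.
        if isinstance(input, str):
            messages: List[Dict[str, Any]] = [{"role": "user", "content": input}]
        else:
            messages = [
                {"role": item.get("role", "user"), "content": item.get("content", "")}
                for item in input
            ]
        request: Dict[str, Any] = {"model": model, "messages": messages, "stream": stream}
        # Mirror the "only pass non-None values" filter in the hunk below.
        return {k: v for k, v in request.items() if v is not None}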
@@ -88,6 +89,18 @@ class LiteLLMCompletionResponsesConfig:
             "custom_llm_provider": custom_llm_provider,
         }
+
+        # Responses API `Completed` events require usage, we pass `stream_options` to litellm.completion to include usage
+        if stream is True:
+            stream_options = {
+                "include_usage": True,
+            }
+            litellm_completion_request["stream_options"] = stream_options
+            litellm_logging_obj: Optional[LiteLLMLoggingObj] = kwargs.get(
+                "litellm_logging_obj"
+            )
+            if litellm_logging_obj:
+                litellm_logging_obj.stream_options = stream_options
 
         # only pass non-None values
         litellm_completion_request = {
             k: v for k, v in litellm_completion_request.items() if v is not None
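Why include_usage matters here: OpenAI-style chat streaming only reports token usage when stream_options={"include_usage": True} is set, and it arrives on a final chunk whose usage field is populated. A minimal sketch of reading it off a litellm.completion stream (model name is a placeholder):

    import litellm

    stream = litellm.completion(
        model="openai/gpt-4o",  # placeholder model
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
        stream_options={"include_usage": True},
    )

    usage = None
    for chunk in stream:
        # Most chunks carry content deltas; the final chunk carries `usage`.
        if getattr(chunk, "usage", None):
            usage = chunk.usage
    print(usage)

This is what lets the transformed stream emit a Responses API response.completed event with usage attached.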