mirror of https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
transform_responses_api_request_to_chat_completion_request
This commit is contained in:
parent 0fec9a042c
commit 4e74951a7d

2 changed files with 14 additions and 0 deletions
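For context, the transformation this commit touches is the bridge litellm uses when a Responses API call is served through a chat-completions provider: the Responses API request is rewritten into a litellm.completion request, and the streamed completion chunks are converted back into Responses API events. Below is a minimal usage sketch of that entry point; it assumes litellm's documented litellm.responses function with stream=True, and the model string and prompt are placeholders, not part of this commit.

import litellm

# Illustrative only: the model name is a placeholder for any model routed
# through the completion bridge.
events = litellm.responses(
    model="anthropic/claude-3-5-sonnet-20240620",
    input="Write a one-line haiku about token usage.",
    stream=True,
)

for event in events:
    # With this commit, the bridged litellm.completion call is made with
    # stream_options={"include_usage": True}, so the final completed event
    # can report token usage.
    print(event)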
@@ -5,6 +5,7 @@ Handler for transforming responses api requests to litellm.completion requests
 from typing import Any, Coroutine, Optional, Union
 
 import litellm
+from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.responses.litellm_completion_transformation.streaming_iterator import (
     LiteLLMCompletionStreamingIterator,
 )
@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Optional, Union
 from openai.types.responses.tool_param import FunctionToolParam
 
 from litellm.caching import InMemoryCache
+from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.responses.litellm_completion_transformation.session_handler import (
     ResponsesAPISessionElement,
     SessionHandler,
@@ -88,6 +89,18 @@ class LiteLLMCompletionResponsesConfig:
             "custom_llm_provider": custom_llm_provider,
         }
 
+        # Responses API `Completed` events require usage, we pass `stream_options` to litellm.completion to include usage
+        if stream is True:
+            stream_options = {
+                "include_usage": True,
+            }
+            litellm_completion_request["stream_options"] = stream_options
+            litellm_logging_obj: Optional[LiteLLMLoggingObj] = kwargs.get(
+                "litellm_logging_obj"
+            )
+            if litellm_logging_obj:
+                litellm_logging_obj.stream_options = stream_options
+
         # only pass non-None values
         litellm_completion_request = {
             k: v for k, v in litellm_completion_request.items() if v is not None
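Read in isolation, the new branch amounts to the self-contained sketch below. The enclosing method is simplified to a plain function and LiteLLMLoggingObj is reduced to a bare stand-in; _LoggingObjStub and _apply_stream_options are hypothetical names for illustration, not litellm APIs, and in the real method the request dict is first built from the full Responses API payload.

from typing import Any, Dict, Optional


class _LoggingObjStub:
    # stand-in for LiteLLMLoggingObj: only the attribute the new branch touches
    stream_options: Optional[Dict[str, Any]] = None


def _apply_stream_options(
    litellm_completion_request: Dict[str, Any],
    stream: Optional[bool],
    **kwargs: Any,
) -> Dict[str, Any]:
    # When streaming, ask litellm.completion to emit a usage chunk so the
    # Responses API `response.completed` event can carry token usage.
    if stream is True:
        stream_options = {"include_usage": True}
        litellm_completion_request["stream_options"] = stream_options
        litellm_logging_obj: Optional[_LoggingObjStub] = kwargs.get("litellm_logging_obj")
        if litellm_logging_obj:
            # mirror the option onto the logging object, as the diff does
            litellm_logging_obj.stream_options = stream_options
    # same final filter as the real method: only pass non-None values
    return {k: v for k, v in litellm_completion_request.items() if v is not None}


request = _apply_stream_options({"model": "gpt-4o", "messages": None}, stream=True)
assert request == {"model": "gpt-4o", "stream_options": {"include_usage": True}}

Mirroring stream_options onto the logging object appears intended to keep usage/cost tracking consistent with what the bridged completion call will actually stream back, though the diff itself only shows the assignment.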