From e29219e55c6435db23db9719f2fec7d73121fd6b Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Fri, 18 Apr 2025 18:08:05 -0700
Subject: [PATCH] fix handler

---
 litellm/proxy/proxy_config.yaml                                |  6 ++++++
 .../litellm_completion_transformation/handler.py               | 10 ++++++++++
 2 files changed, 16 insertions(+)

diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index a4f43e0e90..d15cd9383e 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -5,3 +5,9 @@ model_list:
   - model_name: anthropic/*
     litellm_params:
       model: anthropic/*
+  - model_name: gemini/*
+    litellm_params:
+      model: gemini/*
+litellm_settings:
+  drop_params: true
+
diff --git a/litellm/responses/litellm_completion_transformation/handler.py b/litellm/responses/litellm_completion_transformation/handler.py
index a5545a11f5..ffabae9e41 100644
--- a/litellm/responses/litellm_completion_transformation/handler.py
+++ b/litellm/responses/litellm_completion_transformation/handler.py
@@ -52,6 +52,8 @@ class LiteLLMCompletionTransformationHandler:
         if _is_async:
             return self.async_response_api_handler(
                 litellm_completion_request=litellm_completion_request,
+                request_input=input,
+                responses_api_request=responses_api_request,
                 **kwargs,
             )

@@ -66,6 +68,8 @@ class LiteLLMCompletionTransformationHandler:
             responses_api_response: ResponsesAPIResponse = (
                 LiteLLMCompletionResponsesConfig.transform_chat_completion_response_to_responses_api_response(
                     chat_completion_response=litellm_completion_response,
+                    request_input=input,
+                    responses_api_request=responses_api_request,
                 )
             )

@@ -76,6 +80,8 @@ class LiteLLMCompletionTransformationHandler:
     async def async_response_api_handler(
         self,
         litellm_completion_request: dict,
+        request_input: Union[str, ResponseInputParam],
+        responses_api_request: ResponsesAPIOptionalRequestParams,
         **kwargs,
     ) -> Union[ResponsesAPIResponse, BaseResponsesAPIStreamingIterator]:
         litellm_completion_response: Union[
@@ -89,6 +95,8 @@ class LiteLLMCompletionTransformationHandler:
             responses_api_response: ResponsesAPIResponse = (
                 LiteLLMCompletionResponsesConfig.transform_chat_completion_response_to_responses_api_response(
                     chat_completion_response=litellm_completion_response,
+                    request_input=request_input,
+                    responses_api_request=responses_api_request,
                 )
             )

@@ -97,4 +105,6 @@ class LiteLLMCompletionTransformationHandler:
         elif isinstance(litellm_completion_response, litellm.CustomStreamWrapper):
             return LiteLLMCompletionStreamingIterator(
                 litellm_custom_stream_wrapper=litellm_completion_response,
+                request_input=request_input,
+                responses_api_request=responses_api_request,
             )
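
Note (not part of the patch): a minimal sketch of how the change above is exercised end to end. It assumes litellm's public litellm.responses() entrypoint and a configured GEMINI_API_KEY; the model name and prompt are illustrative, not taken from the patch.

    # Sketch only: model name below is a hypothetical example.
    import litellm

    # Mirrors the drop_params: true setting added to proxy_config.yaml above,
    # so Responses API params a provider does not support are dropped rather
    # than raising an error.
    litellm.drop_params = True

    # A gemini/* model routes through the completion transformation handler
    # patched here, which now threads request_input and responses_api_request
    # into the response transformation and the streaming iterator.
    response = litellm.responses(
        model="gemini/gemini-1.5-flash",
        input="Write a one-line haiku about rain.",
    )
    print(response)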