Mirror of https://github.com/BerriAI/litellm.git
return response headers in response

parent ca8012090c
commit 46cf4f69ae

1 changed file with 13 additions and 0 deletions
@@ -5666,6 +5666,7 @@ def convert_to_model_response_object(
     start_time=None,
     end_time=None,
     hidden_params: Optional[dict] = None,
+    response_headers: Optional[dict] = None,
 ):
     received_args = locals()
     ### CHECK IF ERROR IN RESPONSE ### - openrouter returns these in the dictionary
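For context, a minimal sketch of how a call site might thread the provider's raw HTTP headers into the converter once this parameter exists. The variable names and the header extraction below are illustrative assumptions, not litellm's actual call site; only the response_headers keyword comes from this diff.

# Hypothetical call site (names are illustrative, not litellm's actual code):
# forward the provider's HTTP response headers so they end up on the
# returned model response object.
raw_headers = dict(raw_http_response.headers)  # e.g. from an httpx.Response

model_response = convert_to_model_response_object(
    response_object=provider_json,         # parsed provider payload (assumed name)
    model_response_object=model_response,  # object to populate
    hidden_params=hidden_params,
    response_headers=raw_headers,          # new parameter added in this commit
)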
@@ -5764,6 +5765,9 @@ def convert_to_model_response_object(
             if hidden_params is not None:
                 model_response_object._hidden_params = hidden_params
 
+            if response_headers is not None:
+                model_response_object.response_headers = response_headers
+
             return model_response_object
         elif response_type == "embedding" and (
             model_response_object is None
@@ -5796,6 +5800,9 @@ def convert_to_model_response_object(
             if hidden_params is not None:
                 model_response_object._hidden_params = hidden_params
 
+            if response_headers is not None:
+                model_response_object.response_headers = response_headers
+
             return model_response_object
         elif response_type == "image_generation" and (
             model_response_object is None
@@ -5837,6 +5844,10 @@ def convert_to_model_response_object(
 
             if hidden_params is not None:
                 model_response_object._hidden_params = hidden_params
+
+            if response_headers is not None:
+                model_response_object.response_headers = response_headers
+
             return model_response_object
     except Exception as e:
         raise Exception(
@@ -8262,6 +8273,7 @@ class CustomStreamWrapper:
         logging_obj=None,
         stream_options=None,
         make_call: Optional[Callable] = None,
+        response_headers: Optional[dict] = None,
     ):
         self.model = model
         self.make_call = make_call
@@ -8293,6 +8305,7 @@ class CustomStreamWrapper:
         self._hidden_params = {
             "model_id": (_model_info.get("id", None))
         } # returned as x-litellm-model-id response header in proxy
+        self.response_headers = response_headers
         self.response_id = None
         self.logging_loop = None
         self.rules = Rules()
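Taken together, the change surfaces provider HTTP headers on both the non-streaming response object and the streaming wrapper. Below is a rough sketch of what that enables for a caller, assuming the completion path forwards the headers; the calls, model name, and header keys are illustrative, and only the response_headers attribute itself comes from this diff.

import litellm

# Non-streaming: the response built by convert_to_model_response_object can
# now carry the provider's headers when the caller supplies them.
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hi"}],
)
headers = getattr(response, "response_headers", None)  # attribute added in this commit
if headers:
    # Header names are provider-specific; rate-limit headers are a typical use.
    print(headers.get("x-ratelimit-remaining-requests"))

# Streaming: CustomStreamWrapper now accepts and stores response_headers as well.
stream = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hi"}],
    stream=True,
)
print(getattr(stream, "response_headers", None))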