diff --git a/litellm/proxy/pass_through_endpoints/llm_provider_handlers/anthropic_passthrough_logging_handler.py b/litellm/proxy/pass_through_endpoints/llm_provider_handlers/anthropic_passthrough_logging_handler.py index 705431fb7a..17ff358bdb 100644 --- a/litellm/proxy/pass_through_endpoints/llm_provider_handlers/anthropic_passthrough_logging_handler.py +++ b/litellm/proxy/pass_through_endpoints/llm_provider_handlers/anthropic_passthrough_logging_handler.py @@ -7,9 +7,6 @@ import httpx import litellm from litellm._logging import verbose_proxy_logger from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj -from litellm.litellm_core_utils.litellm_logging import ( - get_standard_logging_object_payload, -) from litellm.llms.anthropic.chat.handler import ( ModelResponseIterator as AnthropicModelResponseIterator, ) @@ -116,22 +113,11 @@ class AnthropicPassthroughLoggingHandler: {"proxy_server_request": {"body": {"user": user}}} ) - # Make standard logging object for Anthropic - standard_logging_object = get_standard_logging_object_payload( - kwargs=kwargs, - init_response_obj=litellm_model_response, - start_time=start_time, - end_time=end_time, - logging_obj=logging_obj, - status="success", - ) - # pretty print standard logging object verbose_proxy_logger.debug( - "standard_logging_object= %s", - json.dumps(standard_logging_object, indent=4), + "kwargs= %s", + json.dumps(kwargs, indent=4, default=str), ) - kwargs["standard_logging_object"] = standard_logging_object # set litellm_call_id to logging response object litellm_model_response.id = logging_obj.litellm_call_id diff --git a/litellm/proxy/pass_through_endpoints/llm_provider_handlers/vertex_passthrough_logging_handler.py b/litellm/proxy/pass_through_endpoints/llm_provider_handlers/vertex_passthrough_logging_handler.py index ea287b57d5..c2bb961bc3 100644 --- a/litellm/proxy/pass_through_endpoints/llm_provider_handlers/vertex_passthrough_logging_handler.py +++ 
b/litellm/proxy/pass_through_endpoints/llm_provider_handlers/vertex_passthrough_logging_handler.py @@ -8,9 +8,6 @@ import httpx import litellm from litellm._logging import verbose_proxy_logger from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj -from litellm.litellm_core_utils.litellm_logging import ( -    get_standard_logging_object_payload, -) from litellm.llms.vertex_ai.gemini.vertex_and_google_ai_studio_gemini import ( ModelResponseIterator as VertexModelResponseIterator, ) @@ -236,21 +233,8 @@ class VertexPassthroughLoggingHandler: kwargs["response_cost"] = response_cost kwargs["model"] = model - # Make standard logging object for Vertex AI - standard_logging_object = get_standard_logging_object_payload( - kwargs=kwargs, - init_response_obj=litellm_model_response, - start_time=start_time, - end_time=end_time, - logging_obj=logging_obj, - status="success", - ) - # pretty print standard logging object - verbose_proxy_logger.debug( - "standard_logging_object= %s", json.dumps(standard_logging_object, indent=4) - ) - kwargs["standard_logging_object"] = standard_logging_object + verbose_proxy_logger.debug("kwargs= %s", json.dumps(kwargs, indent=4, default=str)) # set litellm_call_id to logging response object litellm_model_response.id = logging_obj.litellm_call_id diff --git a/litellm/proxy/pass_through_endpoints/pass_through_endpoints.py b/litellm/proxy/pass_through_endpoints/pass_through_endpoints.py index 1af02d746c..4329a66044 100644 --- a/litellm/proxy/pass_through_endpoints/pass_through_endpoints.py +++ b/litellm/proxy/pass_through_endpoints/pass_through_endpoints.py @@ -4,9 +4,9 @@ import json from base64 import b64encode from datetime import datetime from typing import List, Optional +from urllib.parse import urlparse import httpx -from urllib.parse import urlparse from fastapi import APIRouter, Depends, HTTPException, Request, Response, status from fastapi.responses import StreamingResponse @@ -26,6 +26,7 @@ from
litellm.proxy.auth.user_api_key_auth import user_api_key_auth from litellm.proxy.common_utils.http_parsing_utils import _read_request_body from litellm.secret_managers.main import get_secret_str from litellm.types.llms.custom_http import httpxSpecialProvider +from litellm.types.utils import StandardLoggingUserAPIKeyMetadata from .streaming_handler import PassThroughStreamingHandler from .success_handler import PassThroughEndpointLogging @@ -607,12 +608,19 @@ def _init_kwargs_for_pass_through_endpoint( ) -> dict: _parsed_body = _parsed_body or {} _litellm_metadata: Optional[dict] = _parsed_body.pop("litellm_metadata", None) - _metadata = { - "user_api_key": user_api_key_dict.api_key, - "user_api_key_user_id": user_api_key_dict.user_id, - "user_api_key_team_id": user_api_key_dict.team_id, - "user_api_key_end_user_id": user_api_key_dict.end_user_id, - } + _metadata = dict( + StandardLoggingUserAPIKeyMetadata( + user_api_key_hash=user_api_key_dict.api_key, + user_api_key_alias=user_api_key_dict.key_alias, + user_api_key_user_email=user_api_key_dict.user_email, + user_api_key_user_id=user_api_key_dict.user_id, + user_api_key_team_id=user_api_key_dict.team_id, + user_api_key_org_id=user_api_key_dict.org_id, + user_api_key_team_alias=user_api_key_dict.team_alias, + user_api_key_end_user_id=user_api_key_dict.end_user_id, + ) + ) + _metadata["user_api_key"] = user_api_key_dict.api_key if _litellm_metadata: _metadata.update(_litellm_metadata) diff --git a/tests/pass_through_unit_tests/test_pass_through_unit_tests.py b/tests/pass_through_unit_tests/test_pass_through_unit_tests.py index 22ecd53c9e..db0a647e41 100644 --- a/tests/pass_through_unit_tests/test_pass_through_unit_tests.py +++ b/tests/pass_through_unit_tests/test_pass_through_unit_tests.py @@ -124,10 +124,16 @@ def test_init_kwargs_for_pass_through_endpoint_basic( # Check metadata expected_metadata = { "user_api_key": "test-key", + "user_api_key_hash": "test-key", + "user_api_key_alias": None, + 
"user_api_key_user_email": None, "user_api_key_user_id": "test-user", "user_api_key_team_id": "test-team", + "user_api_key_org_id": None, + "user_api_key_team_alias": None, "user_api_key_end_user_id": "test-user", } + assert result["litellm_params"]["metadata"] == expected_metadata diff --git a/tests/pass_through_unit_tests/test_unit_test_anthropic_pass_through.py b/tests/pass_through_unit_tests/test_unit_test_anthropic_pass_through.py index 889e2aee1f..5404c3ec88 100644 --- a/tests/pass_through_unit_tests/test_unit_test_anthropic_pass_through.py +++ b/tests/pass_through_unit_tests/test_unit_test_anthropic_pass_through.py @@ -200,11 +200,6 @@ def test_create_anthropic_response_logging_payload(mock_logging_obj, metadata_pa assert isinstance(result, dict) assert "model" in result assert "response_cost" in result - assert "standard_logging_object" in result - if metadata_params: - assert "test" == result["standard_logging_object"]["end_user"] - else: - assert "" == result["standard_logging_object"]["end_user"] @pytest.mark.parametrize(