diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 5097640548..853f10234b 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -5104,8 +5104,8 @@ async def google_login(request: Request):
         redirect_params = {}
         state = os.getenv("GENERIC_CLIENT_STATE", None)
         if state:
-            redirect_params['state'] = state
-        return await generic_sso.get_login_redirect(**redirect_params)
+            redirect_params["state"] = state
+        return await generic_sso.get_login_redirect(**redirect_params)  # type: ignore
     elif ui_username is not None:
         # No Google, Microsoft SSO
         # Use UI Credentials set in .env
@@ -5368,15 +5368,15 @@ async def auth_callback(request: Request):
 
     # generic client id
     if generic_client_id is not None:
-        user_id = result.id
-        user_email = result.email
+        user_id = getattr(result, "id", None)
+        user_email = getattr(result, "email", None)
         user_role = getattr(result, generic_user_role_attribute_name, None)
 
     if user_id is None:
         user_id = getattr(result, "first_name", "") + getattr(result, "last_name", "")
 
     user_info = None
-    user_id_models = []
+    user_id_models: List = []
 
     # User might not be already created on first generation of key
     # But if it is, we want its models preferences
diff --git a/litellm/tests/test_promptlayer_integration.py b/litellm/tests/test_promptlayer_integration.py
index e869aa5515..b21b813c66 100644
--- a/litellm/tests/test_promptlayer_integration.py
+++ b/litellm/tests/test_promptlayer_integration.py
@@ -9,8 +9,6 @@ import litellm
 
 import pytest
 
-litellm.success_callback = ["promptlayer"]
-litellm.set_verbose = True
 import time
 
 # def test_promptlayer_logging():
@@ -45,6 +43,8 @@ def test_promptlayer_logging_with_metadata():
         # Redirect stdout
         old_stdout = sys.stdout
         sys.stdout = new_stdout = io.StringIO()
+        litellm.set_verbose = True
+        litellm.success_callback = ["promptlayer"]
 
         response = completion(
             model="gpt-3.5-turbo",
@@ -69,6 +69,9 @@ def test_promptlayer_logging_with_metadata():
 
 def test_promptlayer_logging_with_metadata_tags():
     try:
         # Redirect stdout
+        litellm.set_verbose = True
+
+        litellm.success_callback = ["promptlayer"]
         old_stdout = sys.stdout
         sys.stdout = new_stdout = io.StringIO()
@@ -78,7 +81,7 @@ def test_promptlayer_logging_with_metadata_tags():
             temperature=0.2,
             max_tokens=20,
             metadata={"model": "ai21", "pl_tags": ["env:dev"]},
-            mock_response="this is a mock response"
+            mock_response="this is a mock response",
         )
 
         # Restore stdout
@@ -92,8 +95,6 @@ def test_promptlayer_logging_with_metadata_tags():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
 
-test_promptlayer_logging_with_metadata()
-test_promptlayer_logging_with_metadata_tags()
 
 # def test_chat_openai():
 #     try:
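
For reference, a minimal standalone sketch of the defensive attribute access the auth_callback hunk switches to: getattr(result, "id", None) returns None instead of raising AttributeError when a generic SSO provider's result object does not carry a field. The SimpleNamespace stand-ins and the resolve_user_id helper below are hypothetical illustrations of the pattern, not litellm or fastapi-sso APIs.

    from types import SimpleNamespace

    # Hypothetical SSO results: one provider populates id/email, another omits them.
    complete_result = SimpleNamespace(id="user-123", email="user@example.com")
    sparse_result = SimpleNamespace(first_name="Jane", last_name="Doe")

    def resolve_user_id(result) -> str:
        # Mirror the patched fallback: prefer `id`, else concatenate the name fields.
        user_id = getattr(result, "id", None)  # None instead of AttributeError
        if user_id is None:
            user_id = getattr(result, "first_name", "") + getattr(result, "last_name", "")
        return user_id

    print(resolve_user_id(complete_result))  # user-123
    print(resolve_user_id(sparse_result))    # JaneDoe

With plain attribute access (result.id), the sparse provider's result would raise instead of falling back to the name-based id.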