diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 6b7cd0e2f1..46cd3225a5 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -1115,7 +1115,19 @@ async def embeddings(request: Request, user_api_key_dict: UserAPIKeyAuth = Depen
     except Exception as e:
         await proxy_logging_obj.post_call_failure_hook(user_api_key_dict=user_api_key_dict, original_exception=e)
         traceback.print_exc()
-        raise e
+        if isinstance(e, HTTPException):
+            raise e
+        else:
+            error_traceback = traceback.format_exc()
+            error_msg = f"{str(e)}\n\n{error_traceback}"
+            try:
+                status = e.status_code  # type: ignore
+            except:
+                status = 500
+            raise HTTPException(
+                status_code=status,
+                detail=error_msg
+            )
 
 #### KEY MANAGEMENT ####
 
diff --git a/litellm/tests/test_proxy_exception_mapping.py b/litellm/tests/test_proxy_exception_mapping.py
index da29d0e5f9..9824122e94 100644
--- a/litellm/tests/test_proxy_exception_mapping.py
+++ b/litellm/tests/test_proxy_exception_mapping.py
@@ -73,12 +73,12 @@ def test_chat_completion_exception_azure(client):
     # raise openai.BadRequestError
 
 
-
+# chat/completions openai
 def test_exception_openai_bad_model(client):
     try:
         # Your test data
         test_data = {
-            "model": "openai/GPT-12",
+            "model": "azure/GPT-12",
             "messages": [
                 {
                     "role": "user",
@@ -99,7 +99,7 @@ def test_exception_openai_bad_model(client):
     except Exception as e:
         pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
 
-
+# chat/completions any model
 def test_chat_completion_exception_any_model(client):
     try:
         # Your test data
@@ -125,3 +125,27 @@ def test_chat_completion_exception_any_model(client):
     except Exception as e:
         pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
 
+
+
+# embeddings any model
+def test_embedding_exception_any_model(client):
+    try:
+        # Your test data
+        test_data = {
+            "model": "Lite-GPT-12",
+            "input": ["hi"]
+        }
+
+        response = client.post("/embeddings", json=test_data)
+        print("Response from proxy=", response)
+
+        # make an openai client to call _make_status_error_from_response
+        openai_client = openai.OpenAI(api_key="anything")
+        openai_exception = openai_client._make_status_error_from_response(response=response)
+        print("Exception raised=", openai_exception)
+        assert isinstance(openai_exception, openai.NotFoundError)
+
+    except Exception as e:
+        pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
+
+
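
For reviewers, a minimal sketch of the behavior this patch enables from the client side. This is not part of the patch: it assumes a LiteLLM proxy with this change applied is running at http://0.0.0.0:8000, and "Lite-GPT-12" is a hypothetical model name the proxy cannot resolve.

```python
# Minimal sketch, not part of the patch: assumes a LiteLLM proxy with this
# change is running at http://0.0.0.0:8000; "Lite-GPT-12" is a hypothetical
# model name that the proxy cannot resolve.
import openai

client = openai.OpenAI(api_key="anything", base_url="http://0.0.0.0:8000")

try:
    client.embeddings.create(model="Lite-GPT-12", input=["hi"])
except openai.NotFoundError as e:
    # The proxy now wraps non-HTTPException errors in an HTTPException, so
    # the OpenAI SDK can map the status code to a typed exception; the detail
    # field carries the original message plus the server-side traceback.
    print("status:", e.status_code)
    print("detail:", e.body)
```

If the proxy maps a particular failure to a status other than 404, catching the more general `openai.APIStatusError` covers all typed status errors the SDK can raise.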