diff --git a/litellm/proxy/tests/test_proxy_exception_mapping.py b/litellm/proxy/tests/test_proxy_exception_mapping.py
new file mode 100644
index 000000000..64a7f978d
--- /dev/null
+++ b/litellm/proxy/tests/test_proxy_exception_mapping.py
@@ -0,0 +1,23 @@
+import openai
+client = openai.OpenAI(
+    api_key="anything",
+    base_url="http://0.0.0.0:8000",
+)
+
+try:
+    # request sent to the model set on the litellm proxy, started with `litellm --model`
+    response = client.chat.completions.create(model="gpt-3.5-turbo", messages=[
+        {
+            "role": "user",
+            "content": "this is a test request, write a short poem"
+        },
+    ])
+
+    print(response)
+# except openai.APITimeoutError:
+#     print("Got openai Timeout Exception. Good job. The proxy mapped to OpenAI exceptions")
+except Exception as e:
+    print("\n the proxy did not map to an OpenAI exception. Instead got", e)
+    print(getattr(e, "type", None))
+    print(getattr(e, "message", None))
+    print(getattr(e, "code", None))
\ No newline at end of file
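
Note on intent: the script above is meant to confirm that any error returned by the litellm proxy surfaces client-side as a typed OpenAI SDK exception (an `openai.APIError` subclass such as `openai.APITimeoutError`), rather than a generic failure. Below is a minimal pytest-style sketch of the same check; it assumes a litellm proxy is running locally on port 8000 (e.g. started with `litellm --model gpt-3.5-turbo`), and the test function name is illustrative, not part of this change.

    import openai
    import pytest

    # same client setup as the script above: dummy key, proxy as base_url
    client = openai.OpenAI(api_key="anything", base_url="http://0.0.0.0:8000")

    def test_proxy_maps_to_openai_exceptions():
        # If the proxy maps errors correctly, any failure is raised as an
        # openai.APIError subclass (e.g. APITimeoutError, AuthenticationError).
        try:
            client.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=[{"role": "user", "content": "this is a test request, write a short poem"}],
            )
        except openai.APIError:
            pass  # mapped correctly to an OpenAI exception type
        except Exception as e:
            pytest.fail(f"proxy did not map to an OpenAI exception, got: {e}")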