forked from phoenix/litellm-mirror
(test) proxy - raise openai.AuthenticationError
This commit is contained in:
parent
b9ff8d74ea
commit
cc85aa9af3
1 changed files with 41 additions and 14 deletions
|
@ -23,23 +23,50 @@ def client():
|
||||||
app.include_router(router) # Include your router in the test app
|
app.include_router(router) # Include your router in the test app
|
||||||
return TestClient(app)
|
return TestClient(app)
|
||||||
|
|
||||||
|
# raise openai.AuthenticationError
def test_chat_completion_exception(client):
    """Proxy exception mapping: a /chat/completions call made without valid
    credentials must map to an error response that the openai SDK would
    classify as ``openai.AuthenticationError``.

    Uses the private ``_make_status_error_from_response`` helper of the
    openai client to turn the raw proxy HTTP response into the SDK's
    exception type, then checks the type.
    """
    try:
        # Minimal valid chat-completion payload; auth is what should fail.
        test_data = {
            "model": "gpt-3.5-turbo",
            "messages": [
                {
                    "role": "user",
                    "content": "hi",
                },
            ],
            "max_tokens": 10,
        }

        response = client.post("/chat/completions", json=test_data)

        # Make an openai client purely to reuse its status-code -> exception
        # mapping (_make_status_error_from_response); no request is sent.
        openai_client = openai.OpenAI(api_key="anything")

        openai_exception = openai_client._make_status_error_from_response(
            response=response
        )
        assert isinstance(openai_exception, openai.AuthenticationError)

    except AssertionError:
        # Let the real assertion failure surface with its own message
        # instead of being swallowed by the generic handler below.
        raise
    except Exception as e:
        pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
|
def test_chat_completion_exception_azure(client):
    """Proxy exception mapping for an Azure-routed model: an unauthenticated
    /chat/completions call against ``azure-gpt-3.5-turbo`` must also map to
    ``openai.AuthenticationError`` via the openai SDK's status-error helper.
    """
    try:
        # Minimal valid chat-completion payload targeting the Azure deployment.
        test_data = {
            "model": "azure-gpt-3.5-turbo",
            "messages": [
                {
                    "role": "user",
                    "content": "hi",
                },
            ],
            "max_tokens": 10,
        }

        response = client.post("/chat/completions", json=test_data)

        # Make an openai client purely to reuse its status-code -> exception
        # mapping (_make_status_error_from_response); no request is sent.
        openai_client = openai.OpenAI(api_key="anything")

        openai_exception = openai_client._make_status_error_from_response(
            response=response
        )
        assert isinstance(openai_exception, openai.AuthenticationError)

    except AssertionError:
        # Let the real assertion failure surface with its own message
        # instead of being swallowed by the generic handler below.
        raise
    except Exception as e:
        pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
|
Loading…
Add table
Add a link
Reference in a new issue