fix test litellm_parent_otel_span

This commit is contained in:
Ishaan Jaff 2024-06-07 14:07:58 -07:00
parent d9dacc1f43
commit 7dcf8fc67e
2 changed files with 6 additions and 8 deletions

View file

@@ -103,7 +103,6 @@ def test_chat_completion_exception_azure(mock_acompletion, client):
request_timeout=mock.ANY,
metadata=mock.ANY,
proxy_server_request=mock.ANY,
litellm_parent_otel_span=mock.ANY,
)
json_response = response.json()
@@ -211,7 +210,9 @@ def test_chat_completion_exception_any_model(client):
)
assert isinstance(openai_exception, openai.BadRequestError)
_error_message = openai_exception.message
assert "chat_completion: Invalid model name passed in model=Lite-GPT-12" in str(_error_message)
assert "chat_completion: Invalid model name passed in model=Lite-GPT-12" in str(
_error_message
)
except Exception as e:
pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
@@ -239,7 +240,9 @@ def test_embedding_exception_any_model(client):
print("Exception raised=", openai_exception)
assert isinstance(openai_exception, openai.BadRequestError)
_error_message = openai_exception.message
assert "embeddings: Invalid model name passed in model=Lite-GPT-12" in str(_error_message)
assert "embeddings: Invalid model name passed in model=Lite-GPT-12" in str(
_error_message
)
except Exception as e:
pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
@@ -272,7 +275,6 @@ def test_chat_completion_exception_azure_context_window(mock_acompletion, client
request_timeout=mock.ANY,
metadata=mock.ANY,
proxy_server_request=mock.ANY,
litellm_parent_otel_span=mock.ANY,
)
json_response = response.json()

View file

@@ -190,7 +190,6 @@ def test_engines_model_chat_completions(mock_acompletion, client_no_auth):
specific_deployment=True,
metadata=mock.ANY,
proxy_server_request=mock.ANY,
litellm_parent_otel_span=mock.ANY,
)
print(f"response - {response.text}")
assert response.status_code == 200
@@ -228,7 +227,6 @@ def test_chat_completion_azure(mock_acompletion, client_no_auth):
specific_deployment=True,
metadata=mock.ANY,
proxy_server_request=mock.ANY,
litellm_parent_otel_span=mock.ANY,
)
assert response.status_code == 200
result = response.json()
@@ -273,7 +271,6 @@ def test_openai_deployments_model_chat_completions_azure(
specific_deployment=True,
metadata=mock.ANY,
proxy_server_request=mock.ANY,
litellm_parent_otel_span=mock.ANY,
)
assert response.status_code == 200
result = response.json()
@@ -488,7 +485,6 @@ def test_chat_completion_optional_params(mock_acompletion, client_no_auth):
specific_deployment=True,
metadata=mock.ANY,
proxy_server_request=mock.ANY,
litellm_parent_otel_span=mock.ANY,
)
assert response.status_code == 200
result = response.json()