mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
Litellm dev 11 08 2024 (#6658)
* fix(deepseek/chat): convert content list to str Fixes https://github.com/BerriAI/litellm/issues/6642 * test(test_deepseek_completion.py): implement base llm unit tests increase robustness across providers * fix(router.py): support content policy violation fallbacks with default fallbacks * fix(opentelemetry.py): refactor to move otel imports behind flag Fixes https://github.com/BerriAI/litellm/issues/6636 * fix(opentelemetry.py): close span on success completion * fix(user_api_key_auth.py): allow user_role to default to none * fix: mark flaky test * fix(opentelemetry.py): move otelconfig.from_env to inside the init prevent otel errors raised just by importing the litellm class * fix(user_api_key_auth.py): fix auth error
This commit is contained in:
parent
a9038087cb
commit
7e4dfaa13f
19 changed files with 287 additions and 34 deletions
|
@@ -72,6 +72,19 @@ def test_litellm_proxy_server_config_no_general_settings():
|
|||
# Check if the response is successful
|
||||
assert response.status_code == 200
|
||||
assert response.json() == "I'm alive!"
|
||||
|
||||
# Test /chat/completions
|
||||
response = requests.post(
|
||||
"http://localhost:4000/chat/completions",
|
||||
headers={"Authorization": "Bearer 1234567890"},
|
||||
json={
|
||||
"model": "test_openai_models",
|
||||
"messages": [{"role": "user", "content": "Hello, how are you?"}],
|
||||
},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
except ImportError:
|
||||
pytest.fail("Failed to import litellm.proxy_server")
|
||||
except requests.ConnectionError:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue