Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 03:34:10 +00:00
(test) proxy + s3 caching

parent 2ffce2338e
commit c46051d034

2 changed files with 22 additions and 1 deletion
@@ -82,9 +82,22 @@ def test_chat_completion(client_no_auth):
         print(response)
 
         content = response["choices"][0]["message"]["content"]
+        response1_id = response["id"]
 
         print("\n content", content)
 
         assert len(content) > 1
+
+        print("\nmaking 2nd request to proxy. Testing caching + non streaming")
+        response = client_no_auth.post("/v1/chat/completions", json=test_data)
+        print(f"response - {response.text}")
+        assert response.status_code == 200
+
+        response = response.json()
+        print(response)
+        response2_id = response["id"]
+        assert response1_id == response2_id
+        litellm.disable_cache()
+
     except Exception as e:
         pytest.fail(f"LiteLLM Proxy test failed. Exception - {str(e)}")
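
For context, a minimal sketch (not part of this commit) of how an S3-backed response cache can be enabled in litellm before a test like this runs. The bucket and region values below are placeholders, not values from this commit; in the proxy itself the cache is normally configured through the proxy config rather than set directly like this.

import litellm
from litellm.caching import Cache

# Sketch: turn on litellm's S3-backed response cache.
# Bucket name and region are hypothetical placeholders.
litellm.cache = Cache(
    type="s3",
    s3_bucket_name="example-litellm-cache",  # hypothetical bucket
    s3_region_name="us-west-2",              # hypothetical region
)

# With the cache enabled, two identical chat-completion requests are served
# the same cached response, so their "id" fields match; that is what the
# assert response1_id == response2_id check in the diff above verifies.
# litellm.disable_cache() at the end of the test restores uncached behavior.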