From 49f65b7eb8c3968791c4665b5bf9c12ff37f9b6d Mon Sep 17 00:00:00 2001
From: ishaan-jaff <ishaanjaffer0324@gmail.com>
Date: Mon, 2 Oct 2023 10:28:19 -0700
Subject: [PATCH] add litellm hosted cache test

---
 litellm/tests/test_caching.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/litellm/tests/test_caching.py b/litellm/tests/test_caching.py
index 8a91217dc..f90d8bb31 100644
--- a/litellm/tests/test_caching.py
+++ b/litellm/tests/test_caching.py
@@ -287,4 +287,20 @@ def test_custom_redis_cache_with_key():
 
 # test_custom_redis_cache_with_key()
 
+def test_hosted_cache():
+    litellm.cache = Cache(type="hosted") # use api.litellm.ai for caching
+
+    messages = [{"role": "user", "content": "what is litellm arr today?"}]
+    response1 = completion(model="gpt-3.5-turbo", messages=messages, caching=True)
+    print("response1", response1)
+
+    response2 = completion(model="gpt-3.5-turbo", messages=messages, caching=True)
+    print("response2", response2)
+
+    if response1['choices'][0]['message']['content'] != response2['choices'][0]['message']['content']: # 1 and 2 should be the same
+        print(f"response1: {response1}")
+        print(f"response2: {response2}")
+        pytest.fail("Hosted cache: Response2 is not cached and the same as response 1")
+
+# test_hosted_cache()