From 54653f9a4a4b22417689c6d99c3f93b02c037c01 Mon Sep 17 00:00:00 2001 From: ishaan-jaff Date: Thu, 4 Jan 2024 11:11:08 +0530 Subject: [PATCH] (test) proxy + s3 caching --- .../test_cloudflare_azure_with_cache_config.yaml | 10 +++++++++- litellm/tests/test_proxy_server_caching.py | 13 +++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/litellm/tests/test_configs/test_cloudflare_azure_with_cache_config.yaml b/litellm/tests/test_configs/test_cloudflare_azure_with_cache_config.yaml index 7c4f6ce24..839891a1d 100644 --- a/litellm/tests/test_configs/test_cloudflare_azure_with_cache_config.yaml +++ b/litellm/tests/test_configs/test_cloudflare_azure_with_cache_config.yaml @@ -4,4 +4,12 @@ model_list: model: azure/chatgpt-v-2 api_base: https://gateway.ai.cloudflare.com/v1/0399b10e77ac6668c80404a5ff49eb37/litellm-test/azure-openai/openai-gpt-4-test-v-1 api_key: os.environ/AZURE_API_KEY - api_version: 2023-07-01-preview \ No newline at end of file + api_version: 2023-07-01-preview + +litellm_settings: + set_verbose: True + cache: True # set cache responses to True + cache_params: # set cache params for s3 + type: s3 + s3_bucket_name: cache-bucket-litellm # AWS Bucket Name for S3 + s3_region_name: us-west-2 # AWS Region Name for S3 \ No newline at end of file diff --git a/litellm/tests/test_proxy_server_caching.py b/litellm/tests/test_proxy_server_caching.py index c05a244e9..cb8ca7609 100644 --- a/litellm/tests/test_proxy_server_caching.py +++ b/litellm/tests/test_proxy_server_caching.py @@ -82,9 +82,22 @@ def test_chat_completion(client_no_auth): print(response) content = response["choices"][0]["message"]["content"] + response1_id = response["id"] print("\n content", content) assert len(content) > 1 + + print("\nmaking 2nd request to proxy. Testing caching + non streaming") + response = client_no_auth.post("/v1/chat/completions", json=test_data) + print(f"response - {response.text}") + assert response.status_code == 200 + + response = response.json() + print(response) + response2_id = response["id"] + assert response1_id == response2_id + litellm.disable_cache() + except Exception as e: pytest.fail(f"LiteLLM Proxy test failed. Exception - {str(e)}")