(test) proxy + s3 caching

This commit is contained in:
ishaan-jaff 2024-01-04 11:11:08 +05:30
parent aa757d19f5
commit 54653f9a4a
2 changed files with 22 additions and 1 deletion

View file

@@ -4,4 +4,12 @@ model_list:
      model: azure/chatgpt-v-2
      api_base: https://gateway.ai.cloudflare.com/v1/0399b10e77ac6668c80404a5ff49eb37/litellm-test/azure-openai/openai-gpt-4-test-v-1
      api_key: os.environ/AZURE_API_KEY
      api_version: 2023-07-01-preview
litellm_settings:
  set_verbose: True
  cache: True          # set cache responses to True
  cache_params:        # set cache params for s3
    type: s3
    s3_bucket_name: cache-bucket-litellm   # AWS Bucket Name for S3
    s3_region_name: us-west-2              # AWS Region Name for S3
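For reference, the same S3 response cache that this config turns on for the proxy can also be set up directly in the Python SDK. A minimal sketch, assuming litellm.caching.Cache accepts the same type / s3_bucket_name / s3_region_name parameters as the cache_params block above and that AWS and OpenAI credentials are available in the environment; the bucket and region are the illustrative values from the config:

import litellm
from litellm.caching import Cache

# Standalone setup mirroring the proxy's cache_params above.
# Bucket and region are the illustrative values from the config.
litellm.cache = Cache(
    type="s3",
    s3_bucket_name="cache-bucket-litellm",
    s3_region_name="us-west-2",
)

# Two identical calls: with the cache enabled, the second response
# should be served from S3 rather than the model provider.
messages = [{"role": "user", "content": "hello"}]
first = litellm.completion(model="gpt-3.5-turbo", messages=messages)
second = litellm.completion(model="gpt-3.5-turbo", messages=messages)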

View file

@@ -82,9 +82,22 @@ def test_chat_completion(client_no_auth):
        print(response)
        content = response["choices"][0]["message"]["content"]
        response1_id = response["id"]
        print("\n content", content)
        assert len(content) > 1
        print("\nmaking 2nd request to proxy. Testing caching + non streaming")
        response = client_no_auth.post("/v1/chat/completions", json=test_data)
        print(f"response - {response.text}")
        assert response.status_code == 200
        response = response.json()
        print(response)
        response2_id = response["id"]
        assert response1_id == response2_id
        litellm.disable_cache()
    except Exception as e:
        pytest.fail(f"LiteLLM Proxy test failed. Exception - {str(e)}")
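Outside the pytest harness, the cache-hit behavior asserted above can be exercised against a running proxy in the same way. A minimal sketch, assuming the proxy is started with the config above and listening on localhost:4000; the base URL, API key, and model name are illustrative placeholders, not values taken from this commit:

import openai

# Point the OpenAI client at the LiteLLM proxy; URL, key, and model are placeholders.
client = openai.OpenAI(api_key="anything", base_url="http://localhost:4000")

messages = [{"role": "user", "content": "write a short poem about caching"}]

first = client.chat.completions.create(model="azure-gpt-4", messages=messages)
second = client.chat.completions.create(model="azure-gpt-4", messages=messages)

# With S3 response caching enabled, the second request returns the cached
# completion, so both responses carry the same id (as the test asserts).
assert first.id == second.id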