From 94c01cd3d389e56532017c42d3a5125b090a46a8 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 14 Apr 2025 22:05:48 -0700
Subject: [PATCH] fix mock tests

---
 .../test_anthropic_cache_control_hook.py | 248 ++++++++++--------
 1 file changed, 133 insertions(+), 115 deletions(-)

diff --git a/tests/litellm/integrations/test_anthropic_cache_control_hook.py b/tests/litellm/integrations/test_anthropic_cache_control_hook.py
index fd5f3698ac..3604da2f2d 100644
--- a/tests/litellm/integrations/test_anthropic_cache_control_hook.py
+++ b/tests/litellm/integrations/test_anthropic_cache_control_hook.py
@@ -21,131 +21,149 @@ from litellm.types.utils import StandardCallbackDynamicParams
 
 @pytest.mark.asyncio
 async def test_anthropic_cache_control_hook_system_message():
-    anthropic_cache_control_hook = AnthropicCacheControlHook()
-    litellm.callbacks = [anthropic_cache_control_hook]
-
-    # Mock response data
-    mock_response = MagicMock()
-    mock_response.json.return_value = {
-        "output": {
-            "message": {
-                "role": "assistant",
-                "content": "Here is my analysis of the key terms and conditions...",
-            }
+    # Use patch.dict to mock environment variables instead of setting them directly
+    with patch.dict(
+        os.environ,
+        {
+            "AWS_ACCESS_KEY_ID": "fake_access_key_id",
+            "AWS_SECRET_ACCESS_KEY": "fake_secret_access_key",
+            "AWS_REGION_NAME": "us-west-2",
         },
-        "stopReason": "stop_sequence",
-        "usage": {
-            "inputTokens": 100,
-            "outputTokens": 200,
-            "totalTokens": 300,
-            "cacheReadInputTokens": 100,
-            "cacheWriteInputTokens": 200,
-        },
-    }
-    mock_response.status_code = 200
+    ):
+        anthropic_cache_control_hook = AnthropicCacheControlHook()
+        litellm.callbacks = [anthropic_cache_control_hook]
 
-    # Mock AsyncHTTPHandler.post method
-    client = AsyncHTTPHandler()
-    with patch.object(client, "post", return_value=mock_response) as mock_post:
-        response = await litellm.acompletion(
-            model="bedrock/anthropic.claude-3-5-haiku-20241022-v1:0",
-            messages=[
-                {
-                    "role": "system",
-                    "content": [
-                        {
-                            "type": "text",
-                            "text": "You are an AI assistant tasked with analyzing legal documents.",
-                        },
-                        {
-                            "type": "text",
-                            "text": "Here is the full text of a complex legal agreement",
-                        },
-                    ],
-                },
-                {
-                    "role": "user",
-                    "content": "what are the key terms and conditions in this agreement?",
-                },
-            ],
-            cache_control_injection_points=[
-                {
-                    "location": "message",
-                    "role": "system",
-                },
-            ],
-            client=client,
-        )
+        # Mock response data
+        mock_response = MagicMock()
+        mock_response.json.return_value = {
+            "output": {
+                "message": {
+                    "role": "assistant",
+                    "content": "Here is my analysis of the key terms and conditions...",
+                }
+            },
+            "stopReason": "stop_sequence",
+            "usage": {
+                "inputTokens": 100,
+                "outputTokens": 200,
+                "totalTokens": 300,
+                "cacheReadInputTokens": 100,
+                "cacheWriteInputTokens": 200,
+            },
+        }
+        mock_response.status_code = 200
 
-    mock_post.assert_called_once()
-    request_body = json.loads(mock_post.call_args.kwargs["data"])
+        # Mock AsyncHTTPHandler.post method
+        client = AsyncHTTPHandler()
+        with patch.object(client, "post", return_value=mock_response) as mock_post:
+            response = await litellm.acompletion(
+                model="bedrock/anthropic.claude-3-5-haiku-20241022-v1:0",
+                messages=[
+                    {
+                        "role": "system",
+                        "content": [
+                            {
+                                "type": "text",
+                                "text": "You are an AI assistant tasked with analyzing legal documents.",
+                            },
+                            {
+                                "type": "text",
+                                "text": "Here is the full text of a complex legal agreement",
+                            },
+                        ],
+                    },
+                    {
+                        "role": "user",
+                        "content": "what are the key terms and conditions in this agreement?",
+                    },
+                ],
+                cache_control_injection_points=[
+                    {
+                        "location": "message",
+                        "role": "system",
+                    },
+                ],
+                client=client,
+            )
 
-    print("request_body: ", json.dumps(request_body, indent=4))
+        mock_post.assert_called_once()
+        request_body = json.loads(mock_post.call_args.kwargs["data"])
 
-    # Verify the request body
-    assert request_body["system"][1]["cachePoint"] == {"type": "default"}
+        print("request_body: ", json.dumps(request_body, indent=4))
+
+        # Verify the request body
+        assert request_body["system"][1]["cachePoint"] == {"type": "default"}
 
 
 @pytest.mark.asyncio
 async def test_anthropic_cache_control_hook_user_message():
-    anthropic_cache_control_hook = AnthropicCacheControlHook()
-    litellm.callbacks = [anthropic_cache_control_hook]
-
-    # Mock response data
-    mock_response = MagicMock()
-    mock_response.json.return_value = {
-        "output": {
-            "message": {
-                "role": "assistant",
-                "content": "Here is my analysis of the key terms and conditions...",
-            }
+    # Use patch.dict to mock environment variables instead of setting them directly
+    with patch.dict(
+        os.environ,
+        {
+            "AWS_ACCESS_KEY_ID": "fake_access_key_id",
+            "AWS_SECRET_ACCESS_KEY": "fake_secret_access_key",
+            "AWS_REGION_NAME": "us-west-2",
         },
-        "stopReason": "stop_sequence",
-        "usage": {
-            "inputTokens": 100,
-            "outputTokens": 200,
-            "totalTokens": 300,
-            "cacheReadInputTokens": 100,
-            "cacheWriteInputTokens": 200,
-        },
-    }
-    mock_response.status_code = 200
+    ):
+        anthropic_cache_control_hook = AnthropicCacheControlHook()
+        litellm.callbacks = [anthropic_cache_control_hook]
 
-    # Mock AsyncHTTPHandler.post method
-    client = AsyncHTTPHandler()
-    with patch.object(client, "post", return_value=mock_response) as mock_post:
-        response = await litellm.acompletion(
-            model="bedrock/anthropic.claude-3-5-haiku-20241022-v1:0",
-            messages=[
-                {
+        # Mock response data
+        mock_response = MagicMock()
+        mock_response.json.return_value = {
+            "output": {
+                "message": {
                     "role": "assistant",
-                    "content": [
-                        {
-                            "type": "text",
-                            "text": "You are an AI assistant tasked with analyzing legal documents.",
-                        },
-                    ],
-                },
-                {
-                    "role": "user",
-                    "content": "what are the key terms and conditions in this agreement? ",
-                },
-            ],
-            cache_control_injection_points=[
-                {
-                    "location": "message",
-                    "role": "user",
-                },
-            ],
-            client=client,
-        )
-
-    mock_post.assert_called_once()
-    request_body = json.loads(mock_post.call_args.kwargs["data"])
-
-    print("request_body: ", json.dumps(request_body, indent=4))
-
-    # Verify the request body
-    assert request_body["messages"][1]["content"][1]["cachePoint"] == {
-        "type": "default"
+                    "content": "Here is my analysis of the key terms and conditions...",
+                }
+            },
+            "stopReason": "stop_sequence",
+            "usage": {
+                "inputTokens": 100,
+                "outputTokens": 200,
+                "totalTokens": 300,
+                "cacheReadInputTokens": 100,
+                "cacheWriteInputTokens": 200,
+            },
     }
+        mock_response.status_code = 200
+
+        # Mock AsyncHTTPHandler.post method
+        client = AsyncHTTPHandler()
+        with patch.object(client, "post", return_value=mock_response) as mock_post:
+            response = await litellm.acompletion(
+                model="bedrock/anthropic.claude-3-5-haiku-20241022-v1:0",
+                messages=[
+                    {
+                        "role": "assistant",
+                        "content": [
+                            {
+                                "type": "text",
+                                "text": "You are an AI assistant tasked with analyzing legal documents.",
+                            },
+                        ],
+                    },
+                    {
+                        "role": "user",
+                        "content": "what are the key terms and conditions in this agreement? ",
+                    },
+                ],
+                cache_control_injection_points=[
+                    {
+                        "location": "message",
+                        "role": "user",
+                    },
+                ],
+                client=client,
+            )
+
+        mock_post.assert_called_once()
+        request_body = json.loads(mock_post.call_args.kwargs["data"])
+
+        print("request_body: ", json.dumps(request_body, indent=4))
+
+        # Verify the request body
+        assert request_body["messages"][1]["content"][1]["cachePoint"] == {
+            "type": "default"
+        }