diff --git a/litellm/proxy/tests/test_anthropic_sdk.py b/litellm/proxy/tests/test_anthropic_sdk.py
new file mode 100644
index 000000000..073fafb07
--- /dev/null
+++ b/litellm/proxy/tests/test_anthropic_sdk.py
@@ -0,0 +1,22 @@
+import os
+
+from anthropic import Anthropic
+
+client = Anthropic(
+    # point the SDK at the local LiteLLM proxy instead of api.anthropic.com
+    base_url="http://localhost:4000",
+    # this is a litellm proxy key :) - not a real anthropic key
+    api_key="sk-s4xN1IiLTCytwtZFJaYQrA",
+)
+
+message = client.messages.create(
+    max_tokens=1024,
+    messages=[
+        {
+            "role": "user",
+            "content": "Hello, Claude",
+        }
+    ],
+    model="claude-3-opus-20240229",
+)
+print(message.content)
diff --git a/litellm/tests/test_proxy_routes.py b/litellm/tests/test_proxy_routes.py
index 776ad1e78..0e3f6339c 100644
--- a/litellm/tests/test_proxy_routes.py
+++ b/litellm/tests/test_proxy_routes.py
@@ -19,7 +19,7 @@ import pytest
 
 import litellm
 from litellm.proxy._types import LiteLLMRoutes
-from litellm.proxy.auth.auth_utils import is_openai_route
+from litellm.proxy.auth.auth_utils import is_llm_api_route
 from litellm.proxy.proxy_server import app
 
 # Configure logging
@@ -77,8 +77,8 @@ def test_routes_on_litellm_proxy():
         ("/v1/non_existent_endpoint", False),
     ],
 )
-def test_is_openai_route(route: str, expected: bool):
-    assert is_openai_route(route) == expected
+def test_is_llm_api_route(route: str, expected: bool):
+    assert is_llm_api_route(route) == expected
 
 
 # Test case for routes that are similar but should return False
@@ -91,5 +91,5 @@ def test_is_openai_route(route: str, expected: bool):
         "/engines/model/invalid/completions",
     ],
 )
-def test_is_openai_route_similar_but_false(route: str):
-    assert is_openai_route(route) == False
+def test_is_llm_api_route_similar_but_false(route: str):
+    assert is_llm_api_route(route) == False
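
Reviewer note: the implementation of is_llm_api_route itself is not part of this diff; only the test-side rename from is_openai_route is shown. For context, below is a minimal sketch of the kind of check these tests exercise. The route sets, the regex patterns, and every name except is_llm_api_route are illustrative assumptions, not the actual code in litellm/proxy/auth/auth_utils.py.

import re

# Hypothetical sketch of the renamed helper; the real implementation lives in
# litellm/proxy/auth/auth_utils.py and is not shown in this diff. The route
# sets and patterns below are assumptions chosen to satisfy the test cases.

# Exact-match LLM API paths: OpenAI-style routes plus Anthropic's /v1/messages,
# which the new test_anthropic_sdk.py script reaches through the proxy.
_EXACT_LLM_ROUTES = {
    "/chat/completions",
    "/v1/chat/completions",
    "/completions",
    "/v1/completions",
    "/embeddings",
    "/v1/embeddings",
    "/v1/messages",
}

# Templated paths such as /engines/{model}/completions need pattern matching.
# Note that "/engines/model/invalid/completions" must NOT match (see the
# test_is_llm_api_route_similar_but_false cases in the diff above).
_TEMPLATE_LLM_ROUTES = [
    re.compile(r"^/engines/[^/]+/(chat/)?completions$"),
    re.compile(r"^/openai/deployments/[^/]+/(chat/)?completions$"),
]


def is_llm_api_route(route: str) -> bool:
    """Return True if `route` is an LLM API endpoint the proxy should serve."""
    if route in _EXACT_LLM_ROUTES:
        return True
    return any(pattern.match(route) is not None for pattern in _TEMPLATE_LLM_ROUTES)

Under these assumptions, is_llm_api_route("/v1/messages") returns True, which appears to be the point of the rename: the proxy now authorizes Anthropic-format routes as well (as the new SDK test demonstrates), so "openai" in the helper's name no longer described its scope.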