diff --git a/.circleci/config.yml b/.circleci/config.yml
index 0a6327bb3..e0a41310a 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1093,6 +1093,7 @@ jobs:
             pip install "asyncio==3.4.3"
             pip install "PyGithub==1.59.1"
             pip install "google-cloud-aiplatform==1.59.0"
+            pip install anthropic
       - run:
           name: Build Docker image
           command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
@@ -1104,6 +1105,7 @@
             -e DATABASE_URL=$PROXY_DATABASE_URL \
             -e LITELLM_MASTER_KEY="sk-1234" \
             -e OPENAI_API_KEY=$OPENAI_API_KEY \
+            -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \
             -e LITELLM_LICENSE=$LITELLM_LICENSE \
             --name my-app \
             -v $(pwd)/litellm/proxy/example_config_yaml/pass_through_config.yaml:/app/config.yaml \
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 4d4c6a1a2..1551330d1 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -5654,6 +5654,7 @@ async def moderations(
     tags=["[beta] Anthropic `/v1/messages`"],
     dependencies=[Depends(user_api_key_auth)],
     response_model=AnthropicResponse,
+    include_in_schema=False,
 )
 async def anthropic_response(  # noqa: PLR0915
     anthropic_data: AnthropicMessagesRequest,
diff --git a/tests/anthropic_passthrough/test_anthropic_passthrough.py b/tests/anthropic_passthrough/test_anthropic_passthrough.py
new file mode 100644
index 000000000..beffcbc95
--- /dev/null
+++ b/tests/anthropic_passthrough/test_anthropic_passthrough.py
@@ -0,0 +1,38 @@
+"""
+This test ensures that the proxy can passthrough anthropic requests
+"""
+
+import pytest
+import anthropic
+
+client = anthropic.Anthropic(
+    base_url="http://0.0.0.0:4000/anthropic", api_key="sk-1234"
+)
+
+
+def test_anthropic_basic_completion():
+    print("making basic completion request to anthropic passthrough")
+    response = client.messages.create(
+        model="claude-3-5-sonnet-20241022",
+        max_tokens=1024,
+        messages=[{"role": "user", "content": "Say 'hello test' and nothing else"}],
+    )
+    print(response)
+
+
+def test_anthropic_streaming():
+    print("making streaming request to anthropic passthrough")
+    collected_output = []
+
+    with client.messages.stream(
+        max_tokens=10,
+        messages=[
+            {"role": "user", "content": "Say 'hello stream test' and nothing else"}
+        ],
+        model="claude-3-5-sonnet-20241022",
+    ) as stream:
+        for text in stream.text_stream:
+            collected_output.append(text)
+
+    full_response = "".join(collected_output)
+    print(full_response)
diff --git a/tests/pass_through_tests/test_anthropic_passthrough.py b/tests/pass_through_tests/test_anthropic_passthrough.py
new file mode 100644
index 000000000..beffcbc95
--- /dev/null
+++ b/tests/pass_through_tests/test_anthropic_passthrough.py
@@ -0,0 +1,38 @@
+"""
+This test ensures that the proxy can passthrough anthropic requests
+"""
+
+import pytest
+import anthropic
+
+client = anthropic.Anthropic(
+    base_url="http://0.0.0.0:4000/anthropic", api_key="sk-1234"
+)
+
+
+def test_anthropic_basic_completion():
+    print("making basic completion request to anthropic passthrough")
+    response = client.messages.create(
+        model="claude-3-5-sonnet-20241022",
+        max_tokens=1024,
+        messages=[{"role": "user", "content": "Say 'hello test' and nothing else"}],
+    )
+    print(response)
+
+
+def test_anthropic_streaming():
+    print("making streaming request to anthropic passthrough")
+    collected_output = []
+
+    with client.messages.stream(
+        max_tokens=10,
+        messages=[
+            {"role": "user", "content": "Say 'hello stream test' and nothing else"}
+        ],
+        model="claude-3-5-sonnet-20241022",
+    ) as stream:
+        for text in stream.text_stream:
+            collected_output.append(text)
+
+    full_response = "".join(collected_output)
+    print(full_response)