Compare commits

...

3 commits

Author        SHA1         Message                                            Date
Ishaan Jaff   eed3cee54b   fix pass through tests                             2024-11-20 17:45:50 -08:00
Ishaan Jaff   61fee8eca2   fix swagger                                        2024-11-20 17:39:21 -08:00
Ishaan Jaff   1d44660635   tests - add e2e tests for anthropic pass through   2024-11-20 17:38:01 -08:00
4 changed files with 79 additions and 0 deletions

View file

@@ -1093,6 +1093,7 @@ jobs:
          pip install "asyncio==3.4.3"
          pip install "PyGithub==1.59.1"
          pip install "google-cloud-aiplatform==1.59.0"
          pip install anthropic
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
@@ -1104,6 +1105,7 @@ jobs:
            -e DATABASE_URL=$PROXY_DATABASE_URL \
            -e LITELLM_MASTER_KEY="sk-1234" \
            -e OPENAI_API_KEY=$OPENAI_API_KEY \
            -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \
            -e LITELLM_LICENSE=$LITELLM_LICENSE \
            --name my-app \
            -v $(pwd)/litellm/proxy/example_config_yaml/pass_through_config.yaml:/app/config.yaml \
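
These CI changes install the anthropic SDK in the test environment and hand ANTHROPIC_API_KEY to the proxy container, so the container can forward pass-through traffic to Anthropic while the e2e tests themselves only need the LiteLLM master key. When reproducing the setup locally, a small readiness check like the one below (illustrative only, not part of this diff; the host, port, and helper name are assumptions) can wait for the dockerised proxy on port 4000 before the tests run:

import socket
import time

def wait_for_proxy(host: str = "127.0.0.1", port: int = 4000, timeout: float = 60.0) -> None:
    """Poll until the LiteLLM proxy container accepts TCP connections."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            with socket.create_connection((host, port), timeout=2):
                return
        except OSError:
            time.sleep(1)
    raise RuntimeError(f"proxy on {host}:{port} did not become reachable within {timeout}s")

if __name__ == "__main__":
    wait_for_proxy()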

View file

@@ -5654,6 +5654,7 @@ async def moderations(
    tags=["[beta] Anthropic `/v1/messages`"],
    dependencies=[Depends(user_api_key_auth)],
    response_model=AnthropicResponse,
    include_in_schema=False,
)
async def anthropic_response(  # noqa: PLR0915
    anthropic_data: AnthropicMessagesRequest,
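
The include_in_schema=False argument corresponds to the "fix swagger" commit: FastAPI omits the route from the generated OpenAPI document, so the beta Anthropic /v1/messages endpoint no longer shows up in the Swagger UI even though it stays callable. A minimal, self-contained sketch of the flag (the app and handlers below are illustrative, not LiteLLM's actual code):

from fastapi import FastAPI

app = FastAPI()

# Served as usual, but left out of the OpenAPI schema and the /docs page.
@app.post("/v1/messages", include_in_schema=False)
async def hidden_beta_route() -> dict:
    return {"status": "ok"}

# Routes without the flag keep appearing in the schema.
@app.get("/health")
async def health() -> dict:
    return {"status": "healthy"}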

View file

@@ -0,0 +1,38 @@
"""
This test ensures that the proxy can passthrough anthropic requests
"""

import pytest
import anthropic

client = anthropic.Anthropic(
    base_url="http://0.0.0.0:4000/anthropic", api_key="sk-1234"
)


def test_anthropic_basic_completion():
    print("making basic completion request to anthropic passthrough")

    response = client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Say 'hello test' and nothing else"}],
    )

    print(response)


def test_anthropic_streaming():
    print("making streaming request to anthropic passthrough")

    collected_output = []
    with client.messages.stream(
        max_tokens=10,
        messages=[
            {"role": "user", "content": "Say 'hello stream test' and nothing else"}
        ],
        model="claude-3-5-sonnet-20241022",
    ) as stream:
        for text in stream.text_stream:
            collected_output.append(text)

    full_response = "".join(collected_output)
    print(full_response)
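
The tests point the official anthropic client at the proxy's pass-through prefix (base_url http://0.0.0.0:4000/anthropic) and authenticate with the LiteLLM master key from the CI config, so the proxy injects the real ANTHROPIC_API_KEY before forwarding the request to Anthropic. A rough httpx equivalent of the non-streaming call, handy for debugging the route by hand (the header names and version value follow Anthropic's public Messages API and are not taken from this diff):

import httpx

# Hypothetical manual request against the pass-through route; the SDK call in
# test_anthropic_basic_completion does roughly this under the hood.
resp = httpx.post(
    "http://0.0.0.0:4000/anthropic/v1/messages",
    headers={
        "x-api-key": "sk-1234",             # LiteLLM virtual key, not a real Anthropic key
        "anthropic-version": "2023-06-01",  # assumed API version header
        "content-type": "application/json",
    },
    json={
        "model": "claude-3-5-sonnet-20241022",
        "max_tokens": 1024,
        "messages": [{"role": "user", "content": "Say 'hello test' and nothing else"}],
    },
    timeout=60,
)
print(resp.status_code, resp.json())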

View file

@@ -0,0 +1,38 @@
"""
This test ensures that the proxy can passthrough anthropic requests
"""

import pytest
import anthropic

client = anthropic.Anthropic(
    base_url="http://0.0.0.0:4000/anthropic", api_key="sk-1234"
)


def test_anthropic_basic_completion():
    print("making basic completion request to anthropic passthrough")

    response = client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Say 'hello test' and nothing else"}],
    )

    print(response)


def test_anthropic_streaming():
    print("making streaming request to anthropic passthrough")

    collected_output = []
    with client.messages.stream(
        max_tokens=10,
        messages=[
            {"role": "user", "content": "Say 'hello stream test' and nothing else"}
        ],
        model="claude-3-5-sonnet-20241022",
    ) as stream:
        for text in stream.text_stream:
            collected_output.append(text)

    full_response = "".join(collected_output)
    print(full_response)