forked from phoenix/litellm-mirror

(testing) - add e2e tests for anthropic pass through endpoints (#6840)

* tests - add e2e tests for anthropic pass through
* fix swagger
* fix pass through tests

This commit is contained in:
parent c107bae7ae
commit cc1f8ff0ba

4 changed files with 79 additions and 0 deletions
@@ -1093,6 +1093,7 @@ jobs:
                pip install "asyncio==3.4.3"
                pip install "PyGithub==1.59.1"
                pip install "google-cloud-aiplatform==1.59.0"
+               pip install anthropic
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
@@ -1104,6 +1105,7 @@ jobs:
            -e DATABASE_URL=$PROXY_DATABASE_URL \
            -e LITELLM_MASTER_KEY="sk-1234" \
            -e OPENAI_API_KEY=$OPENAI_API_KEY \
+           -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \
            -e LITELLM_LICENSE=$LITELLM_LICENSE \
            --name my-app \
            -v $(pwd)/litellm/proxy/example_config_yaml/pass_through_config.yaml:/app/config.yaml \
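The step above runs the proxy container with pass_through_config.yaml mounted as its config and ANTHROPIC_API_KEY passed into the environment; the e2e tests further down then talk to it on port 4000 with the sk-1234 master key. As a rough sketch of the raw request shape those tests exercise: the URL and key come from the tests below, while the header names follow the standard Anthropic Messages API, so treat the exact values as assumptions rather than LiteLLM specifics.

import requests

# Hypothetical manual check of the pass-through route the CI job exercises:
# the proxy is assumed to listen on http://0.0.0.0:4000 and forward
# /anthropic/v1/messages to Anthropic's /v1/messages endpoint.
resp = requests.post(
    "http://0.0.0.0:4000/anthropic/v1/messages",
    headers={
        "x-api-key": "sk-1234",             # the proxy's LITELLM_MASTER_KEY from the step above
        "anthropic-version": "2023-06-01",  # standard Anthropic Messages API header
        "content-type": "application/json",
    },
    json={
        "model": "claude-3-5-sonnet-20241022",
        "max_tokens": 256,
        "messages": [{"role": "user", "content": "Say 'hello test' and nothing else"}],
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json())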
@@ -5654,6 +5654,7 @@ async def moderations(
     tags=["[beta] Anthropic `/v1/messages`"],
     dependencies=[Depends(user_api_key_auth)],
     response_model=AnthropicResponse,
+    include_in_schema=False,
 )
 async def anthropic_response(  # noqa: PLR0915
     anthropic_data: AnthropicMessagesRequest,
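The only change in this hunk, include_in_schema=False, is the "fix swagger" part of the commit: FastAPI keeps serving a route flagged this way but omits it from the generated OpenAPI schema, so the beta pass-through endpoint no longer shows up in /docs. A minimal standalone sketch of that flag, with a hypothetical app and handler rather than LiteLLM's actual code:

from fastapi import FastAPI

app = FastAPI()

@app.post("/v1/messages", include_in_schema=False)  # served, but hidden from /docs and openapi.json
async def hidden_messages_route(payload: dict) -> dict:
    # Placeholder body; the real proxy route forwards the request upstream.
    return {"received": payload}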
tests/anthropic_passthrough/test_anthropic_passthrough.py (new file, 38 lines)

@@ -0,0 +1,38 @@
"""
This test ensures that the proxy can passthrough anthropic requests
"""

import pytest
import anthropic

client = anthropic.Anthropic(
    base_url="http://0.0.0.0:4000/anthropic", api_key="sk-1234"
)


def test_anthropic_basic_completion():
    print("making basic completion request to anthropic passthrough")
    response = client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Say 'hello test' and nothing else"}],
    )
    print(response)


def test_anthropic_streaming():
    print("making streaming request to anthropic passthrough")
    collected_output = []

    with client.messages.stream(
        max_tokens=10,
        messages=[
            {"role": "user", "content": "Say 'hello stream test' and nothing else"}
        ],
        model="claude-3-5-sonnet-20241022",
    ) as stream:
        for text in stream.text_stream:
            collected_output.append(text)

    full_response = "".join(collected_output)
    print(full_response)
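The two tests above only print what comes back. One possible tightening, not part of this commit, would be to assert on the response content; the sketch below assumes the standard anthropic SDK response shape, where response.content is a list of text blocks:

def test_anthropic_basic_completion_with_assertions():
    # Hypothetical variant of test_anthropic_basic_completion above.
    response = client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Say 'hello test' and nothing else"}],
    )
    assert response.content, "expected at least one content block via the pass-through"
    # The prompt pins the expected text, so a loose substring check is reasonable.
    assert "hello test" in response.content[0].text.lower()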
tests/pass_through_tests/test_anthropic_passthrough.py (new file, 38 lines)

@@ -0,0 +1,38 @@
"""
This test ensures that the proxy can passthrough anthropic requests
"""

import pytest
import anthropic

client = anthropic.Anthropic(
    base_url="http://0.0.0.0:4000/anthropic", api_key="sk-1234"
)


def test_anthropic_basic_completion():
    print("making basic completion request to anthropic passthrough")
    response = client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Say 'hello test' and nothing else"}],
    )
    print(response)


def test_anthropic_streaming():
    print("making streaming request to anthropic passthrough")
    collected_output = []

    with client.messages.stream(
        max_tokens=10,
        messages=[
            {"role": "user", "content": "Say 'hello stream test' and nothing else"}
        ],
        model="claude-3-5-sonnet-20241022",
    ) as stream:
        for text in stream.text_stream:
            collected_output.append(text)

    full_response = "".join(collected_output)
    print(full_response)
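This second file is identical in content to the one above. For the streaming path, a possible extension (again not part of the commit) is to also inspect the final message the anthropic SDK's streaming helper exposes via get_final_message(); the usage field asserted here is an assumption about what a healthy pass-through response carries:

def test_anthropic_streaming_final_message():
    # Hypothetical variant of test_anthropic_streaming above.
    with client.messages.stream(
        max_tokens=10,
        messages=[
            {"role": "user", "content": "Say 'hello stream test' and nothing else"}
        ],
        model="claude-3-5-sonnet-20241022",
    ) as stream:
        chunks = list(stream.text_stream)
        final_message = stream.get_final_message()

    assert chunks, "expected at least one streamed text chunk via the pass-through"
    assert final_message.usage.output_tokens > 0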