forked from phoenix/litellm-mirror
update tests
This commit is contained in:
parent
b64755d2a1
commit
673105c88f
2 changed files with 27 additions and 5 deletions
22
litellm/proxy/tests/test_anthropic_sdk.py
Normal file
22
litellm/proxy/tests/test_anthropic_sdk.py
Normal file
|
@@ -0,0 +1,22 @@
|
|||
import os
|
||||
|
||||
from anthropic import Anthropic
|
||||
|
||||
client = Anthropic(
|
||||
# This is the default and can be omitted
|
||||
base_url="http://localhost:4000",
|
||||
# this is a litellm proxy key :) - not a real anthropic key
|
||||
api_key="sk-s4xN1IiLTCytwtZFJaYQrA",
|
||||
)
|
||||
|
||||
message = client.messages.create(
|
||||
max_tokens=1024,
|
||||
messages=[
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Hello, Claude",
|
||||
}
|
||||
],
|
||||
model="claude-3-opus-20240229",
|
||||
)
|
||||
print(message.content)
|
|
@@ -19,7 +19,7 @@ import pytest
|
|||
|
||||
import litellm
|
||||
from litellm.proxy._types import LiteLLMRoutes
|
||||
from litellm.proxy.auth.auth_utils import is_openai_route
|
||||
from litellm.proxy.auth.auth_utils import is_llm_api_route
|
||||
from litellm.proxy.proxy_server import app
|
||||
|
||||
# Configure logging
|
||||
|
@@ -77,8 +77,8 @@ def test_routes_on_litellm_proxy():
|
|||
("/v1/non_existent_endpoint", False),
|
||||
],
|
||||
)
|
||||
def test_is_openai_route(route: str, expected: bool):
|
||||
assert is_openai_route(route) == expected
|
||||
def test_is_llm_api_route(route: str, expected: bool):
|
||||
assert is_llm_api_route(route) == expected
|
||||
|
||||
|
||||
# Test case for routes that are similar but should return False
|
||||
|
@@ -91,5 +91,5 @@ def test_is_llm_api_route(route: str, expected: bool):
|
|||
"/engines/model/invalid/completions",
|
||||
],
|
||||
)
|
||||
def test_is_openai_route_similar_but_false(route: str):
|
||||
assert is_openai_route(route) == False
|
||||
def test_is_llm_api_route_similar_but_false(route: str):
|
||||
assert is_llm_api_route(route) == False
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue