use new anthropic interface

This commit is contained in:
Ishaan Jaff 2025-03-31 14:31:09 -07:00
parent 01d85d5fb7
commit bd39a395f1
4 changed files with 9 additions and 5 deletions

View file

@@ -1026,7 +1026,7 @@ from .proxy.proxy_cli import run_server
from .router import Router
from .assistants.main import *
from .batches.main import *
-from .messages import *
+from .anthropic import *
from .batch_completion.main import * # type: ignore
from .rerank_api.main import *
from .llms.anthropic.experimental_pass_through.messages.handler import *

View file

@@ -0,0 +1,4 @@
"""
Anthropic module for LiteLLM
"""
from .messages import acreate, create

View file

@@ -84,7 +84,7 @@ async def test_anthropic_messages_non_streaming():
messages = [{"role": "user", "content": "Hello, can you tell me a short joke?"}]
# Call the handler
-response = await litellm.messages.acreate(
+response = await litellm.anthropic.messages.acreate(
messages=messages,
api_key=api_key,
model="claude-3-haiku-20240307",
@@ -116,7 +116,7 @@ async def test_anthropic_messages_streaming():
# Call the handler
async_httpx_client = AsyncHTTPHandler()
-response = await litellm.messages.acreate(
+response = await litellm.anthropic.messages.acreate(
messages=messages,
api_key=api_key,
model="claude-3-haiku-20240307",
@@ -136,7 +136,7 @@ async def test_anthropic_messages_streaming_with_bad_request():
Test the anthropic_messages with streaming request
"""
try:
-response = await litellm.messages.acreate(
+response = await litellm.anthropic.messages.acreate(
messages=["hi"],
api_key=os.getenv("ANTHROPIC_API_KEY"),
model="claude-3-haiku-20240307",
@@ -460,7 +460,7 @@ async def test_anthropic_messages_with_extra_headers():
mock_client.post = AsyncMock(return_value=mock_response)
# Call the handler with extra_headers and our mocked client
-response = await litellm.messages.acreate(
+response = await litellm.anthropic.messages.acreate(
messages=messages,
api_key=api_key,
model="claude-3-haiku-20240307",