working anthropic API tests

This commit is contained in:
Ishaan Jaff 2025-03-26 17:34:41 -07:00
parent 6bae7c8889
commit c25f61b7ca
4 changed files with 20 additions and 11 deletions

View file

@@ -26,7 +26,6 @@ from litellm._logging import (
log_level,
)
import re
from .messages import *
from litellm.constants import (
DEFAULT_BATCH_SIZE,
DEFAULT_FLUSH_INTERVAL_SECONDS,
@@ -1028,6 +1027,7 @@ from .proxy.proxy_cli import run_server
from .router import Router
from .assistants.main import *
from .batches.main import *
from .messages import *
from .batch_completion.main import * # type: ignore
from .rerank_api.main import *
from .llms.anthropic.experimental_pass_through.messages.handler import *

View file

@@ -71,7 +71,7 @@ async def anthropic_messages(
stop_sequences: Optional[List[str]] = None,
stream: Optional[bool] = False,
system: Optional[str] = None,
temperature: Optional[float] = 1.0,
temperature: Optional[float] = None,
thinking: Optional[Dict] = None,
tool_choice: Optional[Dict] = None,
tools: Optional[List[Dict]] = None,
@@ -152,6 +152,7 @@ async def anthropic_messages(
in anthropic_messages_provider_config.get_supported_anthropic_messages_params(
model=model
)
and v is not None
}
request_body["stream"] = stream
request_body["model"] = model

View file

@@ -2,11 +2,17 @@
Interface for Anthropic's messages API
Use this to call LLMs in Anthropic /messages Request/Response format
This is an __init__.py file to allow the following interface
- litellm.messages.acreate
- litellm.messages.create
"""
from typing import Dict, List, Optional, Union
from typing import AsyncIterator, Dict, Iterator, List, Optional, Union
from litellm.llms.anthropic.experimental_pass_through.handler import (
from litellm.llms.anthropic.experimental_pass_through.messages.handler import (
anthropic_messages as _async_anthropic_messages,
)
from litellm.types.llms.anthropic_messages.anthropic_response import (
@@ -29,7 +35,7 @@ async def acreate(
top_k: Optional[int] = None,
top_p: Optional[float] = None,
**kwargs
) -> AnthropicMessagesResponse:
) -> Union[AnthropicMessagesResponse, AsyncIterator]:
"""
Async wrapper for Anthropic's messages API
@@ -85,7 +91,7 @@ async def create(
top_k: Optional[int] = None,
top_p: Optional[float] = None,
**kwargs
) -> AnthropicMessagesResponse:
) -> Union[AnthropicMessagesResponse, Iterator]:
"""
Async wrapper for Anthropic's messages API

View file

@@ -8,7 +8,7 @@ import unittest.mock
from unittest.mock import AsyncMock, MagicMock
sys.path.insert(
0, os.path.abspath("../..")
0, os.path.abspath("../../..")
) # Adds the parent directory to the system path
import litellm
import pytest
@@ -16,6 +16,7 @@ from dotenv import load_dotenv
from litellm.llms.anthropic.experimental_pass_through.messages.handler import (
anthropic_messages,
)
from typing import Optional
from litellm.types.utils import StandardLoggingPayload
from litellm.integrations.custom_logger import CustomLogger
@@ -73,6 +74,7 @@ async def test_anthropic_messages_non_streaming():
"""
Test the anthropic_messages with non-streaming request
"""
litellm._turn_on_debug()
# Get API key from environment
api_key = os.getenv("ANTHROPIC_API_KEY")
if not api_key:
@@ -82,7 +84,7 @@ async def test_anthropic_messages_non_streaming():
messages = [{"role": "user", "content": "Hello, can you tell me a short joke?"}]
# Call the handler
response = await anthropic_messages(
response = await litellm.messages.acreate(
messages=messages,
api_key=api_key,
model="claude-3-haiku-20240307",
@@ -114,7 +116,7 @@ async def test_anthropic_messages_streaming():
# Call the handler
async_httpx_client = AsyncHTTPHandler()
response = await anthropic_messages(
response = await litellm.messages.acreate(
messages=messages,
api_key=api_key,
model="claude-3-haiku-20240307",
@@ -134,7 +136,7 @@ async def test_anthropic_messages_streaming_with_bad_request():
Test the anthropic_messages with streaming request
"""
try:
response = await anthropic_messages(
response = await litellm.messages.acreate(
messages=["hi"],
api_key=os.getenv("ANTHROPIC_API_KEY"),
model="claude-3-haiku-20240307",
@@ -458,7 +460,7 @@ async def test_anthropic_messages_with_extra_headers():
mock_client.post = AsyncMock(return_value=mock_response)
# Call the handler with extra_headers and our mocked client
response = await anthropic_messages(
response = await litellm.messages.acreate(
messages=messages,
api_key=api_key,
model="claude-3-haiku-20240307",