Litellm dev 01 25 2025 p2 (#8003)

* fix(base_utils.py): support nested JSON schemas passed in for anthropic calls

* refactor(base_utils.py): refactor ref parsing to prevent infinite loop

* test(test_openai_endpoints.py): refactor anthropic test to use bedrock

* fix(langfuse_prompt_management.py): add unit test for sync langfuse calls

Resolves https://github.com/BerriAI/litellm/issues/7938#issuecomment-2613293757
This commit is contained in:
Krish Dholakia 2025-01-25 16:50:57 -08:00 committed by GitHub
parent a7b3c664d1
commit 08b124aeb6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 214 additions and 5 deletions

View file

@ -378,6 +378,39 @@ async def test_chat_completion_streaming():
print(f"response_str: {response_str}")
@pytest.mark.asyncio
async def test_chat_completion_anthropic_structured_output():
    """
    Ensure nested pydantic structured output is returned correctly.

    Regression test: a response_format model that nests another model
    (EventsList -> list[CalendarEvent]) must survive the bedrock/anthropic
    schema translation and come back as a parsed object.
    """
    from pydantic import BaseModel

    class CalendarEvent(BaseModel):
        name: str
        date: str
        participants: list[str]

    class EventsList(BaseModel):
        events: list[CalendarEvent]

    messages = [
        {"role": "user", "content": "List 5 important events in the XIX century"}
    ]

    # Talks to the locally running litellm proxy.
    client = AsyncOpenAI(api_key="sk-1234", base_url="http://0.0.0.0:4000")
    res = await client.beta.chat.completions.parse(
        model="bedrock/us.anthropic.claude-3-sonnet-20240229-v1:0",
        messages=messages,
        response_format=EventsList,
        timeout=60,
    )

    message = res.choices[0].message
    # The original only printed when `message.parsed` was truthy, so a
    # failed/empty parse let the test pass silently. Assert explicitly
    # that structured output was actually parsed.
    assert message.parsed is not None, "structured output was not parsed"
    print(message.parsed.events)
@pytest.mark.asyncio
async def test_chat_completion_old_key():
"""