mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
Add anthropic thinking + reasoning content support (#8778)
* feat(anthropic/chat/transformation.py): add anthropic thinking param support
* feat(anthropic/chat/transformation.py): support returning thinking content for anthropic on streaming responses
* feat(anthropic/chat/transformation.py): return list of thinking blocks (include block signature) — allows usage in tool call responses
* fix(types/utils.py): extract and map reasoning_content from anthropic as content str
* test: add testing to ensure thinking_blocks are returned at the root
* fix(anthropic/chat/handler.py): return thinking blocks on streaming - include signature
* feat(factory.py): handle anthropic thinking blocks translation if in assistant response
* test: handle openai internal instability
* test: handle openai audio instability
* ci: pin anthropic dep
* test: handle openai audio instability
* fix: fix linting error
* refactor(anthropic/chat/transformation.py): refactor function to remain <50 LOC
* fix: fix linting error
* fix: fix linting error
* fix: fix linting error
* fix: fix linting error
This commit is contained in:
parent
9914c166b7
commit
142b195784
16 changed files with 332 additions and 62 deletions
|
@@ -1320,13 +1320,19 @@ def test_standard_logging_payload_audio(turn_off_message_logging, stream):
|
|||
with patch.object(
|
||||
customHandler, "log_success_event", new=MagicMock()
|
||||
) as mock_client:
|
||||
response = litellm.completion(
|
||||
model="gpt-4o-audio-preview",
|
||||
modalities=["text", "audio"],
|
||||
audio={"voice": "alloy", "format": "pcm16"},
|
||||
messages=[{"role": "user", "content": "response in 1 word - yes or no"}],
|
||||
stream=stream,
|
||||
)
|
||||
try:
|
||||
response = litellm.completion(
|
||||
model="gpt-4o-audio-preview",
|
||||
modalities=["text", "audio"],
|
||||
audio={"voice": "alloy", "format": "pcm16"},
|
||||
messages=[
|
||||
{"role": "user", "content": "response in 1 word - yes or no"}
|
||||
],
|
||||
stream=stream,
|
||||
)
|
||||
except Exception as e:
|
||||
if "openai-internal" in str(e):
|
||||
pytest.skip("Skipping test due to openai-internal error")
|
||||
|
||||
if stream:
|
||||
for chunk in response:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue