fix(streaming_handler.py): fix completion start time tracking (#9688)

* fix(streaming_handler.py): fix completion start time tracking

Fixes https://github.com/BerriAI/litellm/issues/9210

* feat(anthropic/chat/transformation.py): map openai 'reasoning_effort' to anthropic 'thinking' param

Fixes https://github.com/BerriAI/litellm/issues/9022

* feat: map 'reasoning_effort' to 'thinking' param across bedrock + vertex

Closes https://github.com/BerriAI/litellm/issues/9022#issuecomment-2705260808
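
A rough usage sketch (not part of this commit's diff) of how the new mapping can be exercised through litellm's OpenAI-compatible interface; the model name and prompt below are illustrative assumptions:

import litellm

# OpenAI-style 'reasoning_effort' on an Anthropic model; with this change it is
# translated into Anthropic's 'thinking' parameter before the request is sent.
# The model name and prompt are placeholders for illustration only.
response = litellm.completion(
    model="anthropic/claude-3-7-sonnet-20250219",
    messages=[{"role": "user", "content": "Summarize the CAP theorem."}],
    reasoning_effort="low",
)
print(response.choices[0].message.content)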
Krish Dholakia authored on 2025-04-01 22:00:56 -07:00 (committed by GitHub)
parent e848caaa4d
commit 4f9805c9aa
9 changed files with 135 additions and 11 deletions

@@ -5901,9 +5901,10 @@ class ModelResponseIterator:
 class ModelResponseListIterator:
-    def __init__(self, model_responses):
+    def __init__(self, model_responses, delay: Optional[float] = None):
         self.model_responses = model_responses
         self.index = 0
+        self.delay = delay

     # Sync iterator
     def __iter__(self):
@@ -5914,6 +5915,8 @@ class ModelResponseListIterator:
             raise StopIteration
         model_response = self.model_responses[self.index]
         self.index += 1
+        if self.delay:
+            time.sleep(self.delay)
         return model_response

     # Async iterator
@@ -5925,6 +5928,8 @@ class ModelResponseListIterator:
             raise StopAsyncIteration
         model_response = self.model_responses[self.index]
         self.index += 1
+        if self.delay:
+            await asyncio.sleep(self.delay)
         return model_response
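
For context, the new delay argument lets a test space out fake stream chunks so timing-sensitive assertions (such as the completion start time fixed in this commit) have a measurable gap to work with. A minimal sketch, assuming chunks is a pre-built list of streaming ModelResponse chunks and an arbitrary 100 ms spacing:

import time

# Hypothetical test snippet: 'chunks' and the 0.1 s delay are assumptions made
# for illustration, not code from this commit.
iterator = ModelResponseListIterator(model_responses=chunks, delay=0.1)

started_at = time.time()
for chunk in iterator:
    # Chunks arrive roughly 0.1 s apart, so the handler's recorded completion
    # start time can be compared against started_at in an assertion.
    pass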