fix(anthropic.py): support async claude 3 tool calling + streaming

https://github.com/BerriAI/litellm/issues/2644
Krrish Dholakia 2024-03-22 19:56:47 -07:00
parent 2e284a0cfe
commit 42a7588b04
2 changed files with 80 additions and 3 deletions

anthropic.py

@@ -301,7 +301,7 @@ def completion(
             )
             streaming_choice.delta = delta_obj
             streaming_model_response.choices = [streaming_choice]
-            completion_stream = model_response_iterator(
+            completion_stream = ModelResponseIterator(
                 model_response=streaming_model_response
             )
             print_verbose(
@@ -330,8 +330,30 @@ def completion(
     return model_response
 
 
-def model_response_iterator(model_response):
-    yield model_response
+class ModelResponseIterator:
+    def __init__(self, model_response):
+        self.model_response = model_response
+        self.is_done = False
+
+    # Sync iterator
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        if self.is_done:
+            raise StopIteration
+        self.is_done = True
+        return self.model_response
+
+    # Async iterator
+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        if self.is_done:
+            raise StopAsyncIteration
+        self.is_done = True
+        return self.model_response
 
 
 def embedding():
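
Why a class instead of the old generator: a plain generator function like model_response_iterator only implements the synchronous iterator protocol, so it works under for but fails under async for, which requires __aiter__ and __anext__. The new ModelResponseIterator implements both protocols, so the async Claude 3 tool-calling + streaming path can drain the same single-chunk stream. Below is a minimal, self-contained sketch of the pattern and how it is consumed on both paths. The class body mirrors the diff above; consume_sync, consume_async, and the "tool-call chunk" placeholder are illustrative only (the real code wraps a streaming_model_response object) and are not part of this commit.

import asyncio


class ModelResponseIterator:
    """Single-shot stream wrapper mirroring the class added in this diff:
    yields its wrapped response exactly once, from sync or async code."""

    def __init__(self, model_response):
        self.model_response = model_response
        self.is_done = False

    # Sync iterator protocol (for ... in ...)
    def __iter__(self):
        return self

    def __next__(self):
        if self.is_done:
            raise StopIteration
        self.is_done = True
        return self.model_response

    # Async iterator protocol (async for ... in ...)
    def __aiter__(self):
        return self

    async def __anext__(self):
        if self.is_done:
            raise StopAsyncIteration
        self.is_done = True
        return self.model_response


def consume_sync():
    # Sync streaming path: a plain for-loop drains the single-chunk stream.
    for chunk in ModelResponseIterator(model_response="tool-call chunk"):
        print("sync:", chunk)


async def consume_async():
    # Async streaming path: async for needs __aiter__/__anext__, which the
    # old generator-based model_response_iterator could not provide.
    async for chunk in ModelResponseIterator(model_response="tool-call chunk"):
        print("async:", chunk)


consume_sync()
asyncio.run(consume_async())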