fix(types/utils.py): fix streaming function name

commit 1019355527
parent 31829855c0
Author: Krrish Dholakia
Date:   2024-07-10 21:56:47 -07:00

2 changed files with 10 additions and 8 deletions

--- a/litellm/types/utils.py
+++ b/litellm/types/utils.py

@@ -166,7 +166,9 @@ class FunctionCall(OpenAIObject):
 class Function(OpenAIObject):
     arguments: str
-    name: str
+    name: Optional[
+        str
+    ]  # can be None - openai e.g.: ChoiceDeltaToolCallFunction(arguments='{"', name=None), type=None)
 
     def __init__(
         self,

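For context on why name must become Optional: with the OpenAI streaming API, only the first tool-call delta carries the function name; continuation deltas arrive with name=None. A minimal sketch, assuming the openai>=1.x Python SDK types (not part of this commit):

    # Sketch: streaming tool-call deltas, assuming openai>=1.x SDK types.
    from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCallFunction

    first = ChoiceDeltaToolCallFunction(name="get_weather", arguments="")
    later = ChoiceDeltaToolCallFunction(name=None, arguments='{"location": "SF"}')

    for delta in (first, later):
        # litellm's Function previously annotated name as str, which fails
        # validation for deltas like `later`; Optional[str] accepts both.
        print(delta.name, delta.arguments)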
--- a/litellm/utils.py
+++ b/litellm/utils.py

@@ -8126,7 +8126,7 @@ class CustomStreamWrapper:
         if chunk.startswith(self.complete_response):
             # Remove last_sent_chunk only if it appears at the start of the new chunk
-            chunk = chunk[len(self.complete_response):]
+            chunk = chunk[len(self.complete_response) :]
         self.complete_response += chunk
         return chunk
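The change above is whitespace-only (Black spaces out the colon when a slice operand is a complex expression); the logic itself trims a prefix that was already emitted. A standalone sketch with illustrative values:

    # Standalone sketch of the prefix-dedup logic; values are illustrative.
    complete_response = "Hello"    # text already sent to the caller
    chunk = "Hello, world"         # new chunk repeating the sent prefix

    if chunk.startswith(complete_response):
        # Keep only the unseen suffix.
        chunk = chunk[len(complete_response) :]
    complete_response += chunk
    print(repr(chunk))  # ', world'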
@@ -9483,8 +9483,8 @@ class CustomStreamWrapper:
                 model_response.choices[0].delta = Delta(**_json_delta)
             except Exception as e:
                 verbose_logger.error(
-                    "litellm.CustomStreamWrapper.chunk_creator(): Exception occured - {}".format(
-                        str(e)
+                    "litellm.CustomStreamWrapper.chunk_creator(): Exception occured - {}\n{}".format(
+                        str(e), traceback.format_exc()
                     )
                 )
                 verbose_logger.debug(traceback.format_exc())
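This change promotes the traceback into the error-level record (previously it was only visible at debug level). Roughly, as a sketch rather than the surrounding litellm code:

    import traceback

    try:
        raise ValueError("bad delta")
    except Exception as e:
        # One error record now carries both the message and the traceback.
        print(
            "litellm.CustomStreamWrapper.chunk_creator(): Exception occured - {}\n{}".format(
                str(e), traceback.format_exc()
            )
        )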
@@ -10124,7 +10124,7 @@ def mock_completion_streaming_obj(
     model_response, mock_response, model, n: Optional[int] = None
 ):
     for i in range(0, len(mock_response), 3):
-        completion_obj = Delta(role="assistant", content=mock_response[i: i + 3])
+        completion_obj = Delta(role="assistant", content=mock_response[i : i + 3])
         if n is None:
             model_response.choices[0].delta = completion_obj
         else:
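Both mock streamers slice mock_response into three-character deltas; the slice itself is unchanged, only respaced. For example:

    # The mock streamers emit mock_response three characters at a time.
    mock_response = "Hello world"
    for i in range(0, len(mock_response), 3):
        print(repr(mock_response[i : i + 3]))
    # 'Hel', 'lo ', 'wor', 'ld'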
@@ -10133,7 +10133,7 @@ def mock_completion_streaming_obj(
             _streaming_choice = litellm.utils.StreamingChoices(
                 index=j,
                 delta=litellm.utils.Delta(
-                    role="assistant", content=mock_response[i: i + 3]
+                    role="assistant", content=mock_response[i : i + 3]
                 ),
             )
             _all_choices.append(_streaming_choice)
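When n is set, this branch fans the same delta out across n StreamingChoices entries, one per choice index. A trimmed sketch, reusing the Delta and StreamingChoices names from this diff:

    import litellm

    n = 2
    _all_choices = []
    for j in range(n):
        _all_choices.append(
            litellm.utils.StreamingChoices(
                index=j,
                delta=litellm.utils.Delta(role="assistant", content="Hel"),
            )
        )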
@@ -10145,7 +10145,7 @@ async def async_mock_completion_streaming_obj(
     model_response, mock_response, model, n: Optional[int] = None
 ):
     for i in range(0, len(mock_response), 3):
-        completion_obj = Delta(role="assistant", content=mock_response[i: i + 3])
+        completion_obj = Delta(role="assistant", content=mock_response[i : i + 3])
         if n is None:
             model_response.choices[0].delta = completion_obj
         else:
@@ -10154,7 +10154,7 @@ async def async_mock_completion_streaming_obj(
             _streaming_choice = litellm.utils.StreamingChoices(
                 index=j,
                 delta=litellm.utils.Delta(
-                    role="assistant", content=mock_response[i: i + 3]
+                    role="assistant", content=mock_response[i : i + 3]
                 ),
             )
             _all_choices.append(_streaming_choice)