add CustomStreamWrapper for OpenAI streaming

ishaan-jaff 2023-08-28 15:28:51 -07:00
parent 0158e12701
commit 69f9c40d80
2 changed files with 9 additions and 1 deletion


@@ -195,7 +195,9 @@ def completion(
        response = openai.ChatCompletion.create(
            engine=model, messages=messages, **optional_params
        )
        if "stream" in optional_params and optional_params["stream"] == True:
            response = CustomStreamWrapper(response, model)
            return response
        ## LOGGING
        logging.post_call(
            input=messages,
@@ -251,6 +253,9 @@ def completion(
        response = openai.ChatCompletion.create(
            model=model, messages=messages, **optional_params
        )
        if "stream" in optional_params and optional_params["stream"] == True:
            response = CustomStreamWrapper(response, model)
            return response
        ## LOGGING
        logging.post_call(
            input=messages,

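For context, a rough usage sketch (not part of this commit; the model name and message below are placeholders): with stream=True in optional_params, completion() now returns the CustomStreamWrapper immediately instead of the raw openai.ChatCompletion generator, and the caller iterates it for chunks.

    import litellm

    response = litellm.completion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hello"}],
        stream=True,  # routes through the CustomStreamWrapper branch added above
    )
    for chunk in response:  # each item is produced by CustomStreamWrapper.__next__
        print(chunk)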

@@ -1623,6 +1623,9 @@ class CustomStreamWrapper:
            elif self.model in litellm.open_ai_text_completion_models:
                chunk = next(self.completion_stream)
                completion_obj["content"] = self.handle_openai_text_completion_chunk(chunk)
            else: # openai chat/azure models
                chunk = next(self.completion_stream)
                completion_obj["content"] = chunk["choices"][0]["delta"]["content"]
            # LOGGING
            self.logging_obj(completion_obj["content"])
            # return this for all models
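For reference, a minimal sketch (assumptions only, not the library's exact code) of what the new else branch pulls out of an OpenAI chat/Azure streaming chunk; extract_delta_content is a hypothetical helper, and the .get fallback covers role-only or empty deltas that real streams can emit.

    # Standard ChatCompletion streaming chunk shape; .get() guards against
    # deltas that carry no "content" key (e.g. the first role-only chunk).
    def extract_delta_content(chunk: dict) -> str:
        delta = chunk["choices"][0]["delta"]
        return delta.get("content", "")

    example_chunk = {
        "choices": [{"index": 0, "delta": {"content": "Hel"}, "finish_reason": None}]
    }
    print(extract_delta_content(example_chunk))  # -> Hel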