fix streaming format

ishaan-jaff 2023-08-09 13:41:11 -07:00
parent 731b4fd985
commit c83ccb66ca
2 changed files with 8 additions and 5 deletions


@@ -164,8 +164,8 @@ def test_completion_replicate_stability_stream():
     try:
         response = completion(model=model_name, messages=messages, stream=True, replicate=True)
         # Add any assertions here to check the response
-        for result in response:
-            print(result)
+        for chunk in response:
+            print(chunk['choices'][0]['delta'])
         print(response)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
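
After this change each streamed chunk is a dict in the OpenAI-style delta format, so the test reads text from chunk['choices'][0]['delta'] instead of printing the raw provider payload. A minimal consumer sketch along the same lines; the model name and message list are hypothetical placeholders, not shown in this diff:

from litellm import completion

messages = [{"role": "user", "content": "Hello, how are you?"}]

response = completion(
    model="stability-ai/stablelm-tuned-alpha-7b",  # hypothetical Replicate model name for illustration
    messages=messages,
    stream=True,
    replicate=True,
)

full_reply = ""
for chunk in response:
    delta = chunk["choices"][0]["delta"]
    # After this commit, delta is {"role": "assistant", "content": "..."} for every streamed provider.
    full_reply += delta.get("content", "")

print(full_reply)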


@@ -625,13 +625,16 @@ class CustomStreamWrapper:
         return self
     def __next__(self):
+        completion_obj ={ "role": "assistant", "content": ""}
         if self.model in litellm.anthropic_models:
             chunk = next(self.completion_stream)
-            return {"choices": [{"delta": chunk.completion}]}
+            completion_obj["content"] = chunk.completion
         elif self.model == "replicate":
             chunk = next(self.completion_stream)
-            return {"choices": [{"delta": chunk}]}
+            completion_obj["content"] = chunk
         elif self.model in litellm.cohere_models:
             chunk = next(self.completion_stream)
-            return {"choices": [{"delta": chunk.text}]}
+            completion_obj["content"] = chunk.text
+        # return this for all models
+        return {"choices": [{"delta": completion_obj}]}