mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 03:04:13 +00:00
fix streaming format
This commit is contained in:
parent
731b4fd985
commit
c83ccb66ca
2 changed files with 8 additions and 5 deletions
|
@ -164,8 +164,8 @@ def test_completion_replicate_stability_stream():
|
|||
try:
|
||||
response = completion(model=model_name, messages=messages, stream=True, replicate=True)
|
||||
# Add any assertions here to check the response
|
||||
for result in response:
|
||||
print(result)
|
||||
for chunk in response:
|
||||
print(chunk['choices'][0]['delta'])
|
||||
print(response)
|
||||
except Exception as e:
|
||||
pytest.fail(f"Error occurred: {e}")
|
||||
|
|
|
@ -625,13 +625,16 @@ class CustomStreamWrapper:
|
|||
return self
|
||||
|
||||
def __next__(self):
    """Return the next streamed chunk, normalized to the OpenAI-style
    streaming shape ``{"choices": [{"delta": ...}]}``.

    Each provider branch unwraps that SDK's chunk object before
    returning: Anthropic chunks expose ``.completion``, Replicate
    yields plain string fragments, and Cohere chunks expose ``.text``.
    For any other model the default ``completion_obj`` dict is
    returned as the delta.

    Raises:
        StopIteration: propagated from the underlying
            ``self.completion_stream`` when it is exhausted.
    """
    # Default delta payload, used only by the fall-through branch for
    # providers without a special case below.
    completion_obj = {"role": "assistant", "content": ""}
    if self.model in litellm.anthropic_models:
        chunk = next(self.completion_stream)
        return {"choices": [{"delta": chunk.completion}]}
    elif self.model == "replicate":
        # NOTE(review): Replicate appears to stream raw strings, so the
        # chunk itself is the delta — confirm against the replicate SDK.
        chunk = next(self.completion_stream)
        return {"choices": [{"delta": chunk}]}
    elif self.model in litellm.cohere_models:
        chunk = next(self.completion_stream)
        return {"choices": [{"delta": chunk.text}]}
    # Fall-through for all other models.
    return {"choices": [{"delta": completion_obj}]}
|
|
Loading…
Add table
Add a link
Reference in a new issue