update streaming

This commit is contained in:
Krrish Dholakia 2023-09-15 15:25:14 -07:00
parent 8fbaf14509
commit fa441e9a3e
5 changed files with 4 additions and 2 deletions

BIN
dist/litellm-0.1.646-py3-none-any.whl vendored Normal file

Binary file not shown.

BIN
dist/litellm-0.1.646.tar.gz vendored Normal file

Binary file not shown.

View file

@@ -2495,13 +2495,15 @@ class CustomStreamWrapper:
completion_obj["content"] = self.handle_cohere_chunk(chunk)
else: # openai chat/azure models
chunk = next(self.completion_stream)
completion_obj["content"] = chunk["choices"][0]["delta"]["content"]
completion_obj = chunk["choices"][0]["delta"]
# LOGGING
threading.Thread(target=self.logging_obj.success_handler, args=(completion_obj,)).start()
# return this for all models
model_response = ModelResponse(stream=True)
model_response.choices[0].delta.content = completion_obj["content"]
if "role" in completion_obj:
model_response.choices[0].delta.role = completion_obj["role"]
return model_response
except StopIteration:
raise StopIteration

View file

@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "0.1.646"
version = "0.1.647"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT License"