From 0bbe85c74383d906e73e7e2ca3e001b1958c9fa1 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Fri, 15 Sep 2023 14:49:02 -0700
Subject: [PATCH] bump version

---
 litellm/utils.py | 3 ++-
 pyproject.toml   | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/litellm/utils.py b/litellm/utils.py
index c23b1e495..e10621b3f 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -2452,7 +2452,7 @@ class CustomStreamWrapper:

     def __next__(self):
         try:
-            completion_obj = {"content": ""}
+            completion_obj = {"content": "", "role": "assistant"} # default to role being assistant
             if self.model in litellm.anthropic_models:
                 chunk = next(self.completion_stream)
                 completion_obj["content"] = self.handle_anthropic_chunk(chunk)
@@ -2497,6 +2497,7 @@
             else: # openai chat/azure models
                 chunk = next(self.completion_stream)
                 completion_obj["content"] = chunk["choices"][0]["delta"]["content"]
+                completion_obj["role"] = chunk["choices"][0]["delta"]["role"]
                 # return chunk # open ai returns finish_reason, we should just return the openai chunk
                 #completion_obj["content"] = self.handle_openai_chat_completion_chunk(chunk)

diff --git a/pyproject.toml b/pyproject.toml
index aa40a40ef..95791c4b5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.640"
+version = "0.1.642"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
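
Note: a minimal sketch (not part of the patch) of how a streaming caller might
observe the new "role" key after this change. It assumes litellm.completion()
with stream=True yields CustomStreamWrapper chunks shaped like OpenAI's
{"choices": [{"delta": {...}}]}, as suggested by the hunks above; the model
name is a hypothetical choice for illustration.

    import litellm

    # Stream a chat completion; with this patch each streamed delta should
    # carry role "assistant" alongside the content fragment.
    response = litellm.completion(
        model="gpt-3.5-turbo",  # hypothetical model for illustration
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
    )
    for chunk in response:
        delta = chunk["choices"][0]["delta"]
        print(delta.get("role"), delta.get("content"))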