diff --git a/dist/litellm-0.1.645-py3-none-any.whl b/dist/litellm-0.1.645-py3-none-any.whl
new file mode 100644
index 000000000..d4677fafb
Binary files /dev/null and b/dist/litellm-0.1.645-py3-none-any.whl differ
diff --git a/dist/litellm-0.1.645.tar.gz b/dist/litellm-0.1.645.tar.gz
new file mode 100644
index 000000000..db8ed012a
Binary files /dev/null and b/dist/litellm-0.1.645.tar.gz differ
diff --git a/litellm/utils.py b/litellm/utils.py
index b9d636506..f19939c7b 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -2451,11 +2451,7 @@ class CustomStreamWrapper:
     def __next__(self):
         try:
-<<<<<<< HEAD
-            completion_obj = {"content": "", "role": ""} # default to role being assistant
-=======
             completion_obj = {"content": ""} # default to role being assistant
->>>>>>> 31d771b (fix streaming error)
             if self.model in litellm.anthropic_models:
                 chunk = next(self.completion_stream)
                 completion_obj["content"] = self.handle_anthropic_chunk(chunk)
diff --git a/pyproject.toml b/pyproject.toml
index 10039fe8e..98a4c3975 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.645"
+version = "0.1.646"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"