remove print statements

This commit is contained in:
Krrish Dholakia 2023-09-27 17:06:41 -07:00
parent 59999e4399
commit 1f2a12fe1f
3 changed files with 1 addition and 4 deletions

View file

@@ -27,7 +27,6 @@ def test_streaming_mock_request():
response = litellm.mock_completion(model=model, messages=messages, stream=True)
complete_response = ""
for chunk in response:
print(f"chunk: {chunk}")
complete_response += chunk["choices"][0]["delta"]["content"]
if complete_response == "":
raise Exception("Empty response received")

View file

@@ -2922,7 +2922,6 @@ class CustomStreamWrapper:
raise ValueError(f"Unable to parse response. Original response: {chunk}")
def handle_replicate_chunk(self, chunk):
print(f"chunk: {chunk}")
try:
text = ""
is_finished = False
@@ -2941,7 +2940,6 @@ class CustomStreamWrapper:
def handle_openai_text_completion_chunk(self, chunk):
try:
print(f"chunk: {chunk}")
return chunk["choices"][0]["text"]
except:
raise ValueError(f"Unable to parse response. Original response: {chunk}")

View file

@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "0.1.784"
version = "0.1.785"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT License"