fix(utils.py): fix text completion streaming

Krrish Dholakia 2024-03-25 16:47:17 -07:00
parent 9e1e97528d
commit dc2c4af631
2 changed files with 2 additions and 2 deletions


@@ -2907,6 +2907,7 @@ def test_async_text_completion_stream():
     async def test_get_response():
         try:
+            litellm.set_verbose = True
             response = await litellm.atext_completion(
                 model="gpt-3.5-turbo-instruct",
                 prompt="good morning",
@@ -2930,7 +2931,7 @@ def test_async_text_completion_stream():
     asyncio.run(test_get_response())

-test_async_text_completion_stream()
+# test_async_text_completion_stream()

 @pytest.mark.asyncio


@@ -8899,7 +8899,6 @@ class CustomStreamWrapper:
                 if data_json["choices"][0].get("finish_reason", None):
                     is_finished = True
                     finish_reason = data_json["choices"][0]["finish_reason"]
-                    self.sent_last_chunk = True
                 print_verbose(
                     f"text: {text}; is_finished: {is_finished}; finish_reason: {finish_reason}"
                 )
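
For reference, a minimal sketch (not part of this commit) of exercising the async text-completion streaming path this fix touches. The model and prompt mirror the test hunk above; stream=True and a configured OpenAI API key are assumptions, not shown in the diff.

import asyncio

import litellm


async def stream_text_completion():
    # Mirrors the test above: request a streamed text completion and iterate
    # the chunks produced by the streaming wrapper.
    response = await litellm.atext_completion(
        model="gpt-3.5-turbo-instruct",
        prompt="good morning",
        stream=True,  # assumed flag; the test name implies the streaming code path
    )
    async for chunk in response:
        # Each chunk follows the OpenAI text-completion format; the final one
        # carries a finish_reason, as handled in the utils.py hunk above.
        print(chunk)


asyncio.run(stream_text_completion())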