diff --git a/litellm/tests/test_class.py b/litellm/tests/test_class.py
index cd2ed58fb..909c8e939 100644
--- a/litellm/tests/test_class.py
+++ b/litellm/tests/test_class.py
@@ -1,13 +1,18 @@
-# #### What this tests ####
-# # This tests the LiteLLM Class
+#### What this tests ####
+# This tests the LiteLLM Class
 
-# import sys, os
-# import traceback
-# import pytest
-# sys.path.insert(
-#     0, os.path.abspath("../..")
-# ) # Adds the parent directory to the system path
-# import litellm
+import sys, os
+import traceback
+import pytest
+sys.path.insert(
+    0, os.path.abspath("../..")
+) # Adds the parent directory to the system path
+import litellm
+
+mr1 = litellm.ModelResponse(stream=True, model="gpt-3.5-turbo")
+mr1.choices[0].finish_reason = "stop"
+mr2 = litellm.ModelResponse(stream=True, model="gpt-3.5-turbo")
+print(mr2.choices[0].finish_reason)
 # litellm.set_verbose = True
 # from litellm import Router
 # import instructor
diff --git a/litellm/utils.py b/litellm/utils.py
index 268b2f2ce..b63f8645b 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -226,7 +226,10 @@ class Usage(OpenAIObject):
 class StreamingChoices(OpenAIObject):
     def __init__(self, finish_reason=None, index=0, delta: Optional[Delta]=None, **params):
         super(StreamingChoices, self).__init__(**params)
-        self.finish_reason = finish_reason
+        if finish_reason:
+            self.finish_reason = finish_reason
+        else:
+            self.finish_reason = None
         self.index = index
         if delta:
             self.delta = delta
@@ -4458,7 +4461,7 @@ class CustomStreamWrapper:
 
     def chunk_creator(self, chunk):
         model_response = ModelResponse(stream=True, model=self.model)
-        print_verbose(f"model_response finish reason 1: {model_response.choices[0].finish_reason}")
+        model_response.choices[0].finish_reason = None
         try:
             # return this for all models
             completion_obj = {"content": ""}