forked from phoenix/litellm-mirror

fix(utils.py): streaming

parent fe9c1e2b45
commit dd925d3de3

2 changed files with 19 additions and 11 deletions

@@ -1,13 +1,18 @@
-# #### What this tests ####
-# # This tests the LiteLLM Class
+#### What this tests ####
+# This tests the LiteLLM Class
 
-# import sys, os
-# import traceback
-# import pytest
-# sys.path.insert(
-#     0, os.path.abspath("../..")
-# )  # Adds the parent directory to the system path
-# import litellm
+import sys, os
+import traceback
+import pytest
+sys.path.insert(
+    0, os.path.abspath("../..")
+)  # Adds the parent directory to the system path
+import litellm
+
+mr1 = litellm.ModelResponse(stream=True, model="gpt-3.5-turbo")
+mr1.choices[0].finish_reason = "stop"
+mr2 = litellm.ModelResponse(stream=True, model="gpt-3.5-turbo")
+print(mr2.choices[0].finish_reason)
 # litellm.set_verbose = True
 # from litellm import Router
 # import instructor
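The test above mutates the finish_reason on one streaming ModelResponse and then constructs a second one to confirm the default is not pre-set. A minimal pytest-style sketch of the same check is below; the test name and the assertion that the default is None are assumptions drawn from the utils.py changes in this commit, not code from the original test file.

import litellm

def test_streaming_finish_reason_default():
    # Setting finish_reason on one streaming response...
    first = litellm.ModelResponse(stream=True, model="gpt-3.5-turbo")
    first.choices[0].finish_reason = "stop"
    # ...should not affect a freshly constructed one, whose default is expected to be None.
    second = litellm.ModelResponse(stream=True, model="gpt-3.5-turbo")
    assert second.choices[0].finish_reason is None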
@@ -226,7 +226,10 @@ class Usage(OpenAIObject):
 class StreamingChoices(OpenAIObject):
     def __init__(self, finish_reason=None, index=0, delta: Optional[Delta]=None, **params):
         super(StreamingChoices, self).__init__(**params)
-        self.finish_reason = finish_reason
+        if finish_reason:
+            self.finish_reason = finish_reason
+        else:
+            self.finish_reason = None
         self.index = index
         if delta:
             self.delta = delta
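The added branch is a simple guard: keep a truthy finish_reason, otherwise store None, so an intermediate stream chunk never reports a finish reason. A self-contained sketch of the same pattern follows; the class here is a plain stand-in (no OpenAIObject base, no Delta) with an illustrative name, not the utils.py implementation.

from typing import Optional

class StreamingChoicesSketch:
    def __init__(self, finish_reason: Optional[str] = None, index: int = 0):
        # Falsy finish_reason values (None, "") collapse to None; only a real value is kept.
        self.finish_reason = finish_reason if finish_reason else None
        self.index = index

print(StreamingChoicesSketch().finish_reason)                       # None
print(StreamingChoicesSketch(finish_reason="stop").finish_reason)   # stop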
@@ -4458,7 +4461,7 @@ class CustomStreamWrapper:
 
     def chunk_creator(self, chunk):
         model_response = ModelResponse(stream=True, model=self.model)
-        print_verbose(f"model_response finish reason 1: {model_response.choices[0].finish_reason}")
+        model_response.choices[0].finish_reason = None
         try:
             # return this for all models
             completion_obj = {"content": ""}
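With chunk_creator resetting finish_reason to None on each freshly built chunk, only the final chunk of a stream should carry a real finish reason. A consumer-side sketch of that contract is below; it assumes litellm.completion called with stream=True yields chunks shaped like ModelResponse, and the model name, prompt, and required provider credentials are placeholders.

import litellm

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,
)
for chunk in response:
    # Expect None on intermediate chunks and a value such as "stop" on the last one.
    print(chunk.choices[0].finish_reason)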