Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
(fix) streaming init response_obj as {}
This commit is contained in:
parent 74d520b1b5
commit bc691cbbcd

1 changed file with 1 addition and 1 deletion
@@ -5125,7 +5125,7 @@ class CustomStreamWrapper:
     def chunk_creator(self, chunk):
         model_response = ModelResponse(stream=True, model=self.model)
         model_response.choices[0].finish_reason = None
-        response_obj = None
+        response_obj = {}
         try:
             # return this for all models
             completion_obj = {"content": ""}
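The one-line change initializes response_obj to an empty dict instead of None before the per-provider parsing in chunk_creator. Below is a minimal sketch of the failure mode this guards against, assuming that later code reads keys such as "text" and "is_finished" back out of response_obj; the helper name handle_chunk and its body are hypothetical, not litellm's actual code.

# Sketch: why starting from {} instead of None matters when downstream
# code reads keys from response_obj with .get().

def handle_chunk(raw_chunk, response_obj):
    # Hypothetical provider-specific parsing step that may or may not run.
    if "choices" in raw_chunk:
        delta = raw_chunk["choices"][0].get("delta", {})
        response_obj["text"] = delta.get("content", "")
        response_obj["is_finished"] = raw_chunk["choices"][0].get("finish_reason") is not None
    # With response_obj initialized to {}, these reads are always safe.
    return response_obj.get("text", ""), response_obj.get("is_finished", False)

# Before the fix:
#   handle_chunk({}, None)  raises AttributeError: 'NoneType' object has no attribute 'get'
# After the fix:
#   handle_chunk({}, {})    returns ("", False)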