Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fix(utils.py): fixing sync streaming for caching
Parent: 788e24bd83
Commit: 2a6a72a0e7
1 changed file with 3 additions and 8 deletions
@@ -1196,7 +1196,8 @@ class Logging:
                         start_time=start_time,
                         end_time=end_time,
                     )
-                except:
+                except Exception as e:
                     complete_streaming_response = None
             else:
                 self.sync_streaming_chunks.append(result)
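For readers outside the codebase, here is a minimal sketch of the pattern this first hunk touches, using hypothetical helpers (rebuild_response_from_chunks, build_for_cache) that are not part of litellm: the final streamed response is rebuilt from the accumulated chunks so it can be cached, and a failure there is swallowed as None rather than caught with a bare except:.

# Minimal, hypothetical sketch (not litellm code): why the fix narrows a bare
# `except:` to `except Exception` when rebuilding the streamed response for caching.
from typing import List, Optional


def rebuild_response_from_chunks(
    chunks: List[dict], start_time: float, end_time: float
) -> dict:
    # Stand-in for stitching streamed deltas back into one complete response.
    return {
        "content": "".join(c.get("content", "") for c in chunks),
        "start_time": start_time,
        "end_time": end_time,
    }


def build_for_cache(
    chunks: List[dict], start_time: float, end_time: float
) -> Optional[dict]:
    try:
        return rebuild_response_from_chunks(
            chunks,
            start_time=start_time,
            end_time=end_time,
        )
    except Exception:
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit;
        # catching Exception keeps a caching failure non-fatal without hiding those.
        return None


print(build_for_cache([{"content": "Hel"}, {"content": "lo"}], 0.0, 1.2))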
@@ -8903,13 +8904,7 @@ class CustomStreamWrapper:
                 response: Optional[ModelResponse] = self.chunk_creator(chunk=chunk)
                 print_verbose(f"PROCESSED CHUNK POST CHUNK CREATOR: {response}")

-                if response is None or (
-                    isinstance(response, ModelResponse)
-                    and isinstance(response.choices[0], StreamingChoices)
-                    and response.choices[0].delta.content is None
-                    and response.choices[0].delta.function_call is None
-                    and response.choices[0].delta.tool_calls is None
-                ):
+                if response is None:
                     continue
                 ## LOGGING
                 threading.Thread(
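The second hunk replaces a multi-condition skip (empty delta content, function_call, and tool_calls) with a plain None check. The sketch below is a hypothetical stand-in for that loop, not litellm's CustomStreamWrapper, and only illustrates the effect of the narrower skip condition under that assumption.

# Hypothetical stand-in for the stream-wrapper loop; names and structure are
# illustrative and are not litellm's CustomStreamWrapper.
from dataclasses import dataclass, field
from typing import Iterable, Iterator, List, Optional


@dataclass
class Delta:
    content: Optional[str] = None
    function_call: Optional[dict] = None
    tool_calls: Optional[list] = None


@dataclass
class Chunk:
    delta: Delta = field(default_factory=Delta)


def iter_chunks(raw_chunks: Iterable[Optional[Chunk]]) -> Iterator[Chunk]:
    collected: List[Chunk] = []
    for response in raw_chunks:  # stand-in for self.chunk_creator(chunk=chunk)
        # After the fix only a None response is skipped; chunks with an empty
        # delta still flow through to downstream logging/caching.
        if response is None:
            continue
        collected.append(response)
        yield response


chunks = [Chunk(Delta(content="Hi")), None, Chunk(Delta())]
print(len(list(iter_chunks(chunks))))  # 2 -> only the None entry was dropped

Under the pre-fix condition the third chunk above (empty delta) would also have been dropped, which is presumably what broke assembling the complete streaming response for the cache in the sync path.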