Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fix(utils.py): fix redis cache test
commit 1447621128
parent a428501e68
2 changed files with 15 additions and 2 deletions
@@ -1169,7 +1169,7 @@ class Logging:
         verbose_logger.debug(f"success callbacks: {litellm.success_callback}")
         ## BUILD COMPLETE STREAMED RESPONSE
         complete_streaming_response = None
-        if self.stream:
+        if self.stream and isinstance(result, ModelResponse):
             if (
                 result.choices[0].finish_reason is not None
             ):  # if it's the last chunk
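The one-line change above tightens the guard before indexing result.choices: during streaming, the success handler can receive results that are not ModelResponse chunks (a plain cached string, for instance), and the old check would raise AttributeError on .choices. A minimal, self-contained sketch of the pattern — Choice, ModelResponse, and is_final_chunk here are illustrative stand-ins, not litellm's actual definitions:

# Illustrative sketch only -- Choice/ModelResponse are stand-ins,
# not litellm's real classes.
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Choice:
    finish_reason: Optional[str] = None

@dataclass
class ModelResponse:
    choices: List[Choice] = field(default_factory=lambda: [Choice()])

def is_final_chunk(stream: bool, result: object) -> bool:
    # Old guard: `if stream:` -- crashes when `result` has no `.choices`
    # (e.g. a raw string replayed from a cache).
    # New guard mirrors the commit: also require the right type.
    if stream and isinstance(result, ModelResponse):
        return result.choices[0].finish_reason is not None
    return False

print(is_final_chunk(True, ModelResponse([Choice("stop")])))  # True
print(is_final_chunk(True, "cached text"))                    # False, no crash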
@@ -8654,6 +8654,8 @@ class CustomStreamWrapper:
                 completion_obj["content"] = response_obj["text"]
                 print_verbose(f"completion obj content: {completion_obj['content']}")
+                if hasattr(chunk, "id"):
+                    model_response.id = chunk.id
                 if response_obj["is_finished"]:
                     model_response.choices[0].finish_reason = response_obj[
                         "finish_reason"
                     ]
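The two added lines copy the provider chunk's id onto the wrapper's model_response, presumably so that every chunk of a stream — and the complete response later rebuilt from those chunks, which the redis cache test compares — shares one response id. A small sketch of the pattern (propagate_chunk_id and the SimpleNamespace objects are illustrative, not litellm internals):

# Illustrative sketch -- objects are stand-ins, not litellm internals.
from types import SimpleNamespace

def propagate_chunk_id(model_response, chunk) -> None:
    # Same hasattr guard as the commit: not every provider chunk has an id.
    if hasattr(chunk, "id"):
        model_response.id = chunk.id

model_response = SimpleNamespace(id="chatcmpl-local-default")
propagate_chunk_id(model_response, SimpleNamespace(id="chatcmpl-abc123"))
print(model_response.id)  # chatcmpl-abc123

propagate_chunk_id(model_response, object())  # chunk without an id: unchanged
print(model_response.id)  # still chatcmpl-abc123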
@@ -8676,6 +8678,8 @@ class CustomStreamWrapper:
                     model_response.system_fingerprint = getattr(
                         response_obj["original_chunk"], "system_fingerprint", None
                     )
+                    if hasattr(response_obj["original_chunk"], "id"):
+                        model_response.id = response_obj["original_chunk"].id
                     if response_obj["logprobs"] is not None:
                         model_response.choices[0].logprobs = response_obj["logprobs"]
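The same id propagation is applied on the path where the raw provider chunk is kept under response_obj["original_chunk"]. Note how every optional field in this hunk is read defensively (getattr with a None default, hasattr, an explicit None check), since providers differ in which fields a chunk carries. A condensed sketch, again with stand-in objects rather than litellm's types:

# Illustrative sketch -- stand-in objects, not litellm's real types.
from types import SimpleNamespace

def copy_optional_fields(model_response, response_obj) -> None:
    original = response_obj["original_chunk"]
    # getattr with a default: an absent attribute becomes None, not an error.
    model_response.system_fingerprint = getattr(original, "system_fingerprint", None)
    if hasattr(original, "id"):
        model_response.id = original.id
    if response_obj["logprobs"] is not None:
        model_response.choices[0].logprobs = response_obj["logprobs"]

resp = SimpleNamespace(choices=[SimpleNamespace(logprobs=None)])
copy_optional_fields(resp, {"original_chunk": SimpleNamespace(id="chunk-1"),
                            "logprobs": None})
print(resp.id, resp.system_fingerprint)  # chunk-1 None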