mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
(fix) caching: use the same "created" in response_object
This commit is contained in:
parent
9dc0074613
commit
eb8dba100d
2 changed files with 29 additions and 4 deletions
|
@ -4937,6 +4937,9 @@ async def convert_to_streaming_response_async(response_object: Optional[dict] =
|
|||
if "id" in response_object:
|
||||
model_response_object.id = response_object["id"]
|
||||
|
||||
if "created" in response_object:
|
||||
model_response_object.created = response_object["created"]
|
||||
|
||||
if "system_fingerprint" in response_object:
|
||||
model_response_object.system_fingerprint = response_object["system_fingerprint"]
|
||||
|
||||
|
@ -4981,6 +4984,9 @@ def convert_to_streaming_response(response_object: Optional[dict] = None):
|
|||
if "id" in response_object:
|
||||
model_response_object.id = response_object["id"]
|
||||
|
||||
if "created" in response_object:
|
||||
model_response_object.created = response_object["created"]
|
||||
|
||||
if "system_fingerprint" in response_object:
|
||||
model_response_object.system_fingerprint = response_object["system_fingerprint"]
|
||||
|
||||
|
@ -5036,6 +5042,9 @@ def convert_to_model_response_object(
|
|||
model_response_object.usage.prompt_tokens = response_object["usage"].get("prompt_tokens", 0) # type: ignore
|
||||
model_response_object.usage.total_tokens = response_object["usage"].get("total_tokens", 0) # type: ignore
|
||||
|
||||
if "created" in response_object:
|
||||
model_response_object.created = response_object["created"]
|
||||
|
||||
if "id" in response_object:
|
||||
model_response_object.id = response_object["id"]
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue