mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
fix storing request status in mem
This commit is contained in:
parent
86b311eeca
commit
36be9967d1
1 changed file with 4 additions and 1 deletion
|
@@ -284,11 +284,14 @@ class ProxyLogging:
         if self.alerting is None:
             return

+        # current alerting threshold
+        alerting_threshold = self.alerting_threshold or 120
+
         await self.internal_usage_cache.async_set_cache(
             key="request_status:{}".format(litellm_call_id),
             value=status,
             local_only=True,
-            ttl=120,
+            ttl=alerting_threshold,
         )

         # The actual implementation of the function
|
Loading…
Add table
Add a link
Reference in a new issue