Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
fix(llm_caching_handler.py): handle no current event loop error
This commit is contained in:
parent
ac3504e000
commit
a174ef2bf4
1 changed file with 6 additions and 3 deletions
|
@@ -14,9 +14,12 @@ class LLMClientCache(InMemoryCache):
         Add the event loop to the cache key, to prevent event loop closed errors.
         If none, use the key as is.
         """
-        event_loop = asyncio.get_event_loop()
-        stringified_event_loop = str(id(event_loop))
-        return f"{key}-{stringified_event_loop}"
+        try:
+            event_loop = asyncio.get_event_loop()
+            stringified_event_loop = str(id(event_loop))
+            return f"{key}-{stringified_event_loop}"
+        except Exception:  # handle no current event loop
+            return key

     def set_cache(self, key, value, **kwargs):
         key = self.update_cache_key_with_event_loop(key)
Loading…
Add table
Add a link
Reference in a new issue