Mirror of https://github.com/BerriAI/litellm.git
fix(llm_caching_handler.py): handle no current event loop error
commit b8d1166e0c
parent 7a8165eaba
1 changed file with 6 additions and 3 deletions
@@ -14,9 +14,12 @@ class LLMClientCache(InMemoryCache):
         Add the event loop to the cache key, to prevent event loop closed errors.
         If none, use the key as is.
         """
-        event_loop = asyncio.get_event_loop()
-        stringified_event_loop = str(id(event_loop))
-        return f"{key}-{stringified_event_loop}"
+        try:
+            event_loop = asyncio.get_event_loop()
+            stringified_event_loop = str(id(event_loop))
+            return f"{key}-{stringified_event_loop}"
+        except Exception:  # handle no current event loop
+            return key

     def set_cache(self, key, value, **kwargs):
         key = self.update_cache_key_with_event_loop(key)
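For context, below is a minimal, self-contained sketch of the behavior this commit introduces; it is not the litellm source. `LLMClientCacheSketch` and `_call_from_worker_thread` are illustrative names only. The sketch shows that when `asyncio.get_event_loop()` raises because there is no current event loop (e.g. in a worker thread), the cache key is now returned unchanged instead of the error propagating.

```python
# Sketch only: illustrative names, not litellm APIs.
import asyncio
import threading


class LLMClientCacheSketch:
    """Stand-in for LLMClientCache(InMemoryCache); only the key logic is shown."""

    def update_cache_key_with_event_loop(self, key):
        """
        Add the event loop to the cache key, to prevent event loop closed errors.
        If none, use the key as is.
        """
        try:
            event_loop = asyncio.get_event_loop()
            stringified_event_loop = str(id(event_loop))
            return f"{key}-{stringified_event_loop}"
        except Exception:  # handle no current event loop
            return key


def _call_from_worker_thread(cache, results):
    # In a non-main thread with no event loop set, asyncio.get_event_loop()
    # raises RuntimeError, so the key falls back to its original value.
    results.append(cache.update_cache_key_with_event_loop("client-key"))


if __name__ == "__main__":
    cache = LLMClientCacheSketch()
    results = []
    worker = threading.Thread(target=_call_from_worker_thread, args=(cache, results))
    worker.start()
    worker.join()
    print(results[0])  # "client-key" — unchanged, since that thread has no event loop
```

Before this change, the same call from a thread without an event loop would raise instead of returning a usable key.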