Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
fix(router.py): reset caching correctly

commit 3e908bf507 (parent 6cb4ef5659)
6 changed files with 119 additions and 91 deletions
@@ -1222,11 +1222,14 @@ class Router:
                raise ValueError("No models available.")

    def flush_cache(self):
        litellm.cache = None
        self.cache.flush_cache()

    def reset(self):
        ## clean up on close
        litellm.success_callback = []
        litellm.__async_success_callback = []
        litellm.failure_callback = []
        litellm._async_failure_callback = []
        self.flush_cache()
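Per this hunk, flush_cache() now clears the module-level litellm.cache in addition to the router's own cache, and reset() unregisters the global success/failure callbacks before flushing; presumably this is what "reset caching correctly" refers to, since completion-level caching reads litellm.cache and leaving it set would keep stale entries live. A minimal usage sketch follows; the model_list entry and API key are placeholders, and the behavior shown is assumed from the diff above rather than from any particular litellm release.

import litellm
from litellm import Router

# Hypothetical deployment list; model name and API key are placeholders.
model_list = [
    {
        "model_name": "gpt-3.5-turbo",
        "litellm_params": {"model": "gpt-3.5-turbo", "api_key": "sk-placeholder"},
    }
]

router = Router(model_list=model_list)

# ... serve traffic with router.completion(...) / router.acompletion(...) ...

# On shutdown (or between test cases), reset() unregisters the global
# callbacks the router installed and flushes its cache; per the diff,
# flush_cache() also drops the module-level litellm.cache reference.
router.reset()
assert litellm.cache is None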