Litellm dev 02 10 2025 p2 (#8443)

* Fixed issue #8246 (#8250)

* Fixed issue #8246

* Added unit tests for discard() and for remove_callback_from_list_by_object()

* fix(openai.py): support dynamic passing of organization param to openai

handles scenario where client-side org id is passed to openai

---------

Co-authored-by: Erez Hadad <erezh@il.ibm.com>
This commit is contained in:
Krish Dholakia 2025-02-10 17:53:46 -08:00 committed by GitHub
parent 47f46f92c8
commit e26d7df91b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 112 additions and 5 deletions

View file

@@ -573,6 +573,21 @@ class Router:
litellm.amoderation, call_type="moderation"
)
def discard(self):
    """
    Detach this router from litellm's global callback registries.

    Acts as a lightweight pseudo-destructor: call it once the router
    instance is retired so the module-level callback lists no longer
    hold references to it. Currently this only removes the router
    object from every known callback list via the logging callback
    manager; no other state is torn down.
    """
    # Every global list the router may have registered itself on.
    registered_lists = (
        litellm._async_success_callback,
        litellm.success_callback,
        litellm._async_failure_callback,
        litellm.failure_callback,
        litellm.input_callback,
        litellm.service_callback,
        litellm.callbacks,
    )
    for callback_list in registered_lists:
        litellm.logging_callback_manager.remove_callback_from_list_by_object(
            callback_list, self
        )
def _update_redis_cache(self, cache: RedisCache):
"""
Update the redis cache for the router, if none set.
@@ -587,6 +602,7 @@ class Router:
if self.cache.redis_cache is None:
self.cache.redis_cache = cache
def initialize_assistants_endpoint(self):
## INITIALIZE PASS THROUGH ASSISTANTS ENDPOINT ##
self.acreate_assistants = self.factory_function(litellm.acreate_assistants)