mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
fix(proxy_server.py): fix setting router redis cache, if cache enabled on litellm_settings (#8859)
* fix(proxy_server.py): fix setting router redis cache, if cache enabled on litellm_settings
  enables configurations like namespace to just work
* fix(redis_cache.py): fix key for async increment, to use the set namespace
  prevents collisions if redis instance shared across environments
* fix load tests on litellm release notes
* fix caching on main branch (#8858)
* fix(streaming_handler.py): fix is delta empty check to handle empty str
* fix(streaming_handler.py): fix delta chunk on final response
* [Bug]: Deepseek error on proxy after upgrading to 1.61.13-stable (#8860)
* fix deepseek error
* test_deepseek_provider_async_completion
* fix get_complete_url
* bump: version 1.61.17 → 1.61.18
* bump: version 1.61.18 → 1.61.19
* vertex ai anthropic thinking param support (#8853)
* fix(vertex_llm_base.py): handle credentials passed in as dictionary
* fix(router.py): support vertex credentials as json dict
* test(test_vertex.py): allows easier testing
  mock anthropic thinking response for vertex ai
* test(vertex_ai_partner_models/): don't remove "@" from model
  breaks anthropic cost calculation
* test: move testing
* fix: fix linting error
* fix: fix linting error
* fix(vertex_ai_partner_models/main.py): split @ for codestral model
* test: fix test
* fix: fix stripping "@" on mistral models
* fix: fix test
* test: fix test

---------

Co-authored-by: Ishaan Jaff <ishaanjaffer0324@gmail.com>
parent 4ee4719a4f
commit ed65dee59f
4 changed files with 50 additions and 4 deletions
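The redis_cache.py item in the commit message above is easiest to picture as key prefixing: the async increment path should write to the same namespaced key that reads use, so two proxies sharing one Redis instance under different namespaces never collide. Below is a minimal sketch of that idea, assuming a hypothetical NamespacedRedisCache wrapper over redis.asyncio; it is not litellm's actual RedisCache class or API.

    # Hypothetical sketch of the namespace idea behind the redis_cache.py fix;
    # NamespacedRedisCache is NOT litellm's RedisCache, just an illustration of
    # routing every read and write through the same key-prefixing step.
    import asyncio
    from typing import Optional

    import redis.asyncio as redis


    class NamespacedRedisCache:
        def __init__(self, client: redis.Redis, namespace: Optional[str] = None):
            self.client = client
            self.namespace = namespace

        def _key(self, key: str) -> str:
            # Every read AND write must go through the same prefixing step.
            return f"{self.namespace}:{key}" if self.namespace else key

        async def async_increment(self, key: str, value: int = 1) -> int:
            # The bug class being fixed: incrementing the raw key while reads
            # use the namespaced one. Prefix here too.
            return await self.client.incrby(self._key(key), value)

        async def async_get(self, key: str) -> Optional[bytes]:
            return await self.client.get(self._key(key))


    async def main() -> None:
        client = redis.Redis(host="localhost", port=6379)
        cache = NamespacedRedisCache(client, namespace="litellm-staging")
        await cache.async_increment("daily_spend")   # writes "litellm-staging:daily_spend"
        print(await cache.async_get("daily_spend"))  # reads the same namespaced key
        await client.aclose()                        # redis-py >= 5; use close() on 4.x


    if __name__ == "__main__":
        asyncio.run(main())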
proxy_server.py

@@ -1653,10 +1653,6 @@ class ProxyConfig:
             ## INIT PROXY REDIS USAGE CLIENT ##
             redis_usage_cache = litellm.cache.cache

-            ## INIT ROUTER REDIS CACHE ##
-            if llm_router is not None:
-                llm_router._update_redis_cache(cache=redis_usage_cache)
-
     async def get_config(self, config_file_path: Optional[str] = None) -> dict:
         """
         Load config file

@@ -2183,6 +2179,9 @@ class ProxyConfig:
             ),
         )  # type:ignore

+        if redis_usage_cache is not None and router.cache.redis_cache is None:
+            router._update_redis_cache(cache=redis_usage_cache)
+
         # Guardrail settings
         guardrails_v2: Optional[List[Dict]] = None

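For context on the second hunk: the router only receives the proxy-level Redis cache when it was built without one of its own, so a router-level Redis configuration still wins. A rough sketch of that guard with simplified stand-in classes follows; these are not litellm's real Router or cache types.

    # Simplified stand-ins illustrating the guard added in the second hunk;
    # RedisCacheStub / DualCacheStub / RouterStub are NOT litellm classes.
    from typing import Optional


    class RedisCacheStub:
        def __init__(self, namespace: Optional[str] = None):
            self.namespace = namespace


    class DualCacheStub:
        def __init__(self, redis_cache: Optional[RedisCacheStub] = None):
            self.redis_cache = redis_cache


    class RouterStub:
        def __init__(self, cache: Optional[DualCacheStub] = None):
            self.cache = cache or DualCacheStub()

        def _update_redis_cache(self, cache: RedisCacheStub) -> None:
            # Attach the shared proxy Redis client to the router's cache.
            self.cache.redis_cache = cache


    # litellm_settings.cache produced a namespaced proxy-level Redis cache ...
    redis_usage_cache: Optional[RedisCacheStub] = RedisCacheStub(namespace="litellm.proxy")
    # ... while router_settings did not configure Redis, so the router has none.
    router = RouterStub()

    # The guard from the diff: only hand the proxy cache to the router when the
    # router was constructed without its own Redis cache.
    if redis_usage_cache is not None and router.cache.redis_cache is None:
        router._update_redis_cache(cache=redis_usage_cache)

    assert router.cache.redis_cache is redis_usage_cache

This also appears to be the point of moving the call: the block removed in the first hunk only updated llm_router if it already existed at cache-init time, while the added guard runs right after the router is constructed from the config.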