(feat) proxy - cache allow bool
parent 96c0f1495b
commit 827cc66c15
1 changed file with 4 additions and 1 deletion
|
@@ -496,7 +496,10 @@ def load_router_config(router: Optional[litellm.Router], config_file_path: str):
         if key == "cache":
             print(f"{blue_color_code}\nSetting Cache on Proxy")
             from litellm.caching import Cache
-            cache_type = value["type"]
+            if isinstance(value, dict):
+                cache_type = value.get("type", "redis")
+            else:
+                cache_type = "redis" # default to using redis on cache
             cache_responses = True
             cache_host = litellm.get_secret("REDIS_HOST", None)
             cache_port = litellm.get_secret("REDIS_PORT", None)
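With this change, the "cache" entry in the proxy config may be either a plain bool (e.g. cache: true) or a dict of cache settings; in both cases the cache type falls back to redis unless a dict supplies an explicit "type". A minimal standalone sketch of that dispatch, where resolve_cache_type is an illustrative name rather than anything from the commit:

    # Illustrative sketch of the new branch; resolve_cache_type is a
    # hypothetical helper, not part of the litellm codebase.
    def resolve_cache_type(value):
        if isinstance(value, dict):
            # dict form, e.g. cache: {"type": "redis"}; honor an explicit
            # "type" key, defaulting to redis when it is absent
            return value.get("type", "redis")
        # bool form, e.g. cache: true; default to redis
        return "redis"

    assert resolve_cache_type(True) == "redis"
    assert resolve_cache_type({"type": "redis"}) == "redis"
    assert resolve_cache_type({}) == "redis"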