def update_cache(
    type: Optional[Literal["local", "redis"]] = "local",
    host: Optional[str] = None,
    port: Optional[str] = None,
    password: Optional[str] = None,
    supported_call_types: Optional[
        List[Literal["completion", "acompletion", "embedding", "aembedding"]]
    ] = None,
    **kwargs,
):
    """
    Replace the global LiteLLM cache with a freshly configured one.

    Rebinds ``litellm.cache`` to a new ``Cache`` instance built from the
    given settings, overwriting whatever cache was previously configured.

    Args:
        type: Cache backend to use, ``"local"`` (in-process) or ``"redis"``.
        host: Redis host — only meaningful when ``type="redis"``.
        port: Redis port (kept as ``str`` to match ``enable_cache``'s signature).
        password: Redis password — only meaningful when ``type="redis"``.
        supported_call_types: Which call types participate in caching.
            Defaults to all four: completion/acompletion/embedding/aembedding.
        **kwargs: Forwarded verbatim to the ``Cache`` constructor.

    Returns:
        None. The effect is the module-level side effect on ``litellm.cache``.
    """
    # Use the None-sentinel idiom instead of a mutable list default: a list
    # literal in the signature would be a single shared object across calls,
    # and it is handed to Cache(), which could mutate it.
    if supported_call_types is None:
        supported_call_types = ["completion", "acompletion", "embedding", "aembedding"]
    print_verbose("LiteLLM: Updating Cache")
    litellm.cache = Cache(
        type=type,
        host=host,
        port=port,
        password=password,
        supported_call_types=supported_call_types,
        **kwargs,
    )
    print_verbose(f"LiteLLM: Cache Updated, litellm.cache={litellm.cache}")
    print_verbose(f"LiteLLM Cache: {vars(litellm.cache)}")