(feat) add cache context manager

ishaan-jaff 2023-12-30 19:32:51 +05:30
parent 231148ed73
commit ddddfe6602
2 changed files with 58 additions and 0 deletions


@@ -71,6 +71,7 @@ from .llms.prompt_templates.factory import (
 import tiktoken
 from concurrent.futures import ThreadPoolExecutor
 from typing import Callable, List, Optional, Dict, Union, Mapping
+from .caching import enable_cache, disable_cache
 encoding = tiktoken.get_encoding("cl100k_base")
 from litellm.utils import (
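
For context, a minimal usage sketch of the helpers this commit wires up. This is not part of the diff: the no-argument calls and the in-process default cache are assumptions, not something this commit confirms.

# Illustrative sketch only: assumes enable_cache()/disable_cache() can be
# called with no arguments to toggle LiteLLM's default in-process cache.
import litellm
from litellm.caching import enable_cache, disable_cache

enable_cache()  # subsequent identical completion calls may be served from cache

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
)

disable_cache()  # turn caching back off for later calls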