forked from phoenix/litellm-mirror
fix install on python 3.8
parent b13a93d9bc
commit effc7579ac
2 changed files with 3 additions and 2 deletions
@@ -89,7 +89,7 @@ jobs:
           name: Linting Testing
           command: |
             cd litellm
-            python -m pip install types-requests types-setuptools types-redis types-PyYAML
+            python -m pip install types-requests types-setuptools types-redis types-PyYAML types-cachetools
             if ! python -m mypy . --ignore-missing-imports; then
               echo "mypy detected errors"
               exit 1
@@ -17,7 +17,6 @@ import traceback
 from datetime import timedelta
 from typing import Any, BinaryIO, List, Literal, Optional, Union
 
-from cachetools import LRUCache
 from openai._models import BaseModel as OpenAIObject
 
 import litellm
@@ -69,6 +68,8 @@ class InMemoryCache(BaseCache):
         """
         max_size_in_memory [int]: Maximum number of items in cache. done to prevent memory leaks. Use 200 items as a default
         """
+        from cachetools import LRUCache
+
         self.max_size_in_memory = max_size_in_memory or 200
         self.cache_dict: LRUCache = LRUCache(maxsize=self.max_size_in_memory)
         self.ttl_dict: LRUCache = LRUCache(maxsize=self.max_size_in_memory)
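The hunk above moves the cachetools import from module scope into InMemoryCache.__init__, so cachetools is only required once a cache instance is actually constructed rather than at import time. Below is a minimal, self-contained sketch of that pattern, not the real litellm class: BoundedMemoryCache and its set_cache/get_cache methods are hypothetical names used for illustration; only the lazy import and the two bounded LRUCache structures mirror the diff.

# Minimal sketch of the deferred-import pattern shown in the hunk above.
# BoundedMemoryCache, set_cache, and get_cache are illustrative names only.
import time
from typing import Any, Optional


class BoundedMemoryCache:
    def __init__(self, max_size_in_memory: Optional[int] = None):
        # Importing here (not at module level) means cachetools is only
        # needed when a cache object is actually created.
        from cachetools import LRUCache

        self.max_size_in_memory = max_size_in_memory or 200
        # Bounded mappings: least-recently-used entries are evicted once
        # maxsize is reached, which keeps memory use capped.
        self.cache_dict: LRUCache = LRUCache(maxsize=self.max_size_in_memory)
        self.ttl_dict: LRUCache = LRUCache(maxsize=self.max_size_in_memory)

    def set_cache(self, key: str, value: Any, ttl: Optional[float] = None) -> None:
        self.cache_dict[key] = value
        if ttl is not None:
            # Store an absolute expiry timestamp alongside the value.
            self.ttl_dict[key] = time.time() + ttl

    def get_cache(self, key: str) -> Optional[Any]:
        expiry = self.ttl_dict.get(key)
        if expiry is not None and time.time() > expiry:
            # Entry expired: drop it from both mappings and report a miss.
            self.cache_dict.pop(key, None)
            self.ttl_dict.pop(key, None)
            return None
        return self.cache_dict.get(key)

With maxsize defaulting to 200, the in-memory cache cannot grow without bound, which matches the intent stated in the docstring added by the hunk.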