Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)
(docs) proxy caching
commit adbdb31b9d
parent 1f1f749ad9
1 changed file with 1 addition and 2 deletions
@@ -11,8 +11,7 @@ model_list:
 litellm_settings:
   set_verbose: True
-  cache: # init cache
-    type: redis # tell litellm to use redis caching (Also: `pip install redis`)
+  cache: True # set cache responses to True, litellm defaults to using a redis cache
 ```
 
 #### Step 2: Add Redis Credentials to .env
 
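For reference, a minimal sketch of how the docs snippet reads after this change. Only the `litellm_settings` block comes from the diff above; the `model_list` entry is an assumed placeholder for whatever deployment the surrounding docs define:

```yaml
# config.yaml -- sketch of the proxy config after this commit
# NOTE: the model_list entry is a hypothetical placeholder;
# only litellm_settings reflects the change in the diff above.
model_list:
  - model_name: gpt-3.5-turbo        # assumed example deployment
    litellm_params:
      model: gpt-3.5-turbo

litellm_settings:
  set_verbose: True
  cache: True # set cache responses to True, litellm defaults to using a redis cache
```

With `cache: True`, the nested `type: redis` sub-key is dropped from the snippet; Redis connection details are instead supplied via the .env credentials covered in Step 2.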