# litellm/tests/local_testing/example_config_yaml/cache_with_params.yaml
model_list:
  - model_name: "openai-model"
    litellm_params:
      model: "gpt-3.5-turbo"

litellm_settings:
  cache: True
  cache_params:
    type: "redis"
    supported_call_types: ["embedding", "aembedding"]
    host: "os.environ/REDIS_HOST"
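
# Notes (not part of the original test fixture):
# - `cache: True` turns on litellm's caching layer; `supported_call_types`
#   restricts it to embedding calls ("embedding" and "aembedding"), so
#   completion calls are not cached.
# - The "os.environ/REDIS_HOST" value is the proxy convention for reading the
#   setting from the REDIS_HOST environment variable at startup.
#
# Usage sketch: this config is typically loaded by the proxy, e.g.
#   litellm --config cache_with_params.yaml
# with REDIS_HOST (and any other Redis credentials) exported beforehand.
#
# Roughly equivalent programmatic setup in the Python SDK (a sketch, assuming
# the litellm.caching.Cache interface; parameter names here are illustrative):
#   import os
#   import litellm
#   from litellm.caching import Cache
#
#   litellm.cache = Cache(
#       type="redis",
#       host=os.environ["REDIS_HOST"],
#       port=os.environ.get("REDIS_PORT", "6379"),
#       supported_call_types=["embedding", "aembedding"],
#   )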