fix(init.py): rename feature_flag

Krrish Dholakia 2024-08-05 11:23:20 -07:00
parent 3c4c78a71f
commit a9fdfb5a99
3 changed files with 4 additions and 4 deletions


@@ -146,7 +146,7 @@ return_response_headers: bool = (
 )
 ##################
 logging: bool = True
-enable_caching_on_optional_params: bool = (
+enable_caching_on_provider_specific_optional_params: bool = (
     False  # feature-flag for caching on optional params - e.g. 'top_k'
 )
 caching: bool = (


@@ -1903,7 +1903,7 @@ class Cache:
                     param not in litellm_param_kwargs
                 ):  # check if user passed in optional param - e.g. top_k
                     if (
-                        litellm.enable_caching_on_optional_params is True
+                        litellm.enable_caching_on_provider_specific_optional_params is True
                     ):  # feature flagged for now
                         if kwargs[param] is None:
                             continue  # ignore None params
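
For context on what this flag gates, here is a minimal sketch of the behavior, assuming the default in-memory Cache and a model that accepts 'top_k' (the model name below is illustrative only). With the flag enabled, provider-specific optional params are folded into the cache key, so calls that differ only in 'top_k' no longer collide:

import litellm
from litellm.caching import Cache

# Feature-flag from this commit: include provider-specific optional
# params (e.g. 'top_k') in the cache key. Defaults to False.
litellm.enable_caching_on_provider_specific_optional_params = True
litellm.cache = Cache()  # default in-memory cache

messages = [{"role": "user", "content": "hello"}]

# These two calls differ only in 'top_k'. With the flag on they hash
# to distinct cache keys; with it off, 'top_k' is skipped during key
# construction and the second call is served from the first's entry.
response1 = litellm.completion(
    model="gemini/gemini-1.5-flash",  # illustrative; any top_k-capable model
    messages=messages,
    top_k=5,
    caching=True,
)
response2 = litellm.completion(
    model="gemini/gemini-1.5-flash",
    messages=messages,
    top_k=10,
    caching=True,
)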


@@ -302,7 +302,7 @@ def test_caching_with_models_v2():
 
 
 def test_caching_with_optional_params():
-    litellm.enable_caching_on_optional_params = True
+    litellm.enable_caching_on_provider_specific_optional_params = True
     messages = [
         {"role": "user", "content": "who is ishaan CTO of litellm from litellm 2023"}
     ]
@@ -352,7 +352,7 @@ def test_caching_with_optional_params():
         print(f"response1: {response1}")
         print(f"response2: {response2}")
         pytest.fail(f"Error occurred:")
-    litellm.enable_caching_on_optional_params = False
+    litellm.enable_caching_on_provider_specific_optional_params = False
 
 
 embedding_large_text = (
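
One consequence of the rename worth noting for downstream code: Python module attributes are dynamic, so code that still sets the old name fails silently rather than raising an error. A short illustration:

import litellm

# Pre-rename code like this no longer does anything: it merely sets an
# unused attribute on the module, with no error raised.
litellm.enable_caching_on_optional_params = True  # silently ignored now

# Post-rename, the cache-key logic only consults the new flag:
litellm.enable_caching_on_provider_specific_optional_params = True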