diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json
index 4824ceec46..3b9bd946f5 100644
--- a/litellm/model_prices_and_context_window_backup.json
+++ b/litellm/model_prices_and_context_window_backup.json
@@ -4607,7 +4607,7 @@
         "source": "https://aistudio.google.com",
         "supports_tool_choice": true
     },
-    "gemini/learnIm-1.5-pro-experimental": {
+    "gemini/learnlm-1.5-pro-experimental": {
         "max_tokens": 8192,
         "max_input_tokens": 32767,
         "max_output_tokens": 8192,
diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 4824ceec46..3b9bd946f5 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -4607,7 +4607,7 @@
         "source": "https://aistudio.google.com",
         "supports_tool_choice": true
     },
-    "gemini/learnIm-1.5-pro-experimental": {
+    "gemini/learnlm-1.5-pro-experimental": {
         "max_tokens": 8192,
         "max_input_tokens": 32767,
         "max_output_tokens": 8192,
diff --git a/tests/local_testing/test_get_model_info.py b/tests/local_testing/test_get_model_info.py
index 89ebcfaff0..57da99135e 100644
--- a/tests/local_testing/test_get_model_info.py
+++ b/tests/local_testing/test_get_model_info.py
@@ -121,7 +121,11 @@ def test_get_model_info_gemini():
     model_map = litellm.model_cost

     for model, info in model_map.items():
-        if model.startswith("gemini/") and not "gemma" in model:
+        if (
+            model.startswith("gemini/")
+            and not "gemma" in model
+            and not "learnlm" in model
+        ):
             assert info.get("tpm") is not None, f"{model} does not have tpm"
             assert info.get("rpm") is not None, f"{model} does not have rpm"
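
For reference, a minimal sketch (not part of the diff) of how the renamed key can be checked locally through litellm's public lookups (`litellm.model_cost` and `litellm.get_model_info`). Forcing the packaged cost map via `LITELLM_LOCAL_MODEL_COST_MAP` is an assumption about the intended setup; the expected value is taken from the JSON hunks above.

```python
# Minimal sketch, assuming litellm is installed with this change applied.
import os

# Assumption: pin the cost map to the bundled backup JSON rather than the
# remote copy, so the lookup reflects the files edited in this diff.
os.environ["LITELLM_LOCAL_MODEL_COST_MAP"] = "True"

import litellm

# The corrected key should now be present; the old misspelled one should not.
assert "gemini/learnlm-1.5-pro-experimental" in litellm.model_cost
assert "gemini/learnIm-1.5-pro-experimental" not in litellm.model_cost

# get_model_info exposes the same metadata shown in the JSON hunks above.
info = litellm.get_model_info(model="gemini/learnlm-1.5-pro-experimental")
print(info["max_input_tokens"])  # expected: 32767, per the entry above
```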