diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index 5fd4e5b62..762155ed9 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -4,6 +4,9 @@ model_list:
       model: openai/fake
       api_key: fake-key
       api_base: https://exampleopenaiendpoint-production.up.railway.app/
+  - model_name: gemini-flash
+    litellm_params:
+      model: gemini/gemini-1.5-flash
 
 general_settings:
   master_key: sk-1234
diff --git a/litellm/tests/test_get_model_info.py b/litellm/tests/test_get_model_info.py
index 3fd6a6d22..687aa062f 100644
--- a/litellm/tests/test_get_model_info.py
+++ b/litellm/tests/test_get_model_info.py
@@ -1,13 +1,16 @@
 # What is this?
 ## Unit testing for the 'get_model_info()' function
-import os, sys, traceback
+import os
+import sys
+import traceback
 
 sys.path.insert(
     0, os.path.abspath("../..")
 )  # Adds the parent directory to the system path
+import pytest
+
 import litellm
 from litellm import get_model_info
-import pytest
 
 
 def test_get_model_info_simple_model_name():
@@ -37,3 +40,9 @@ def test_get_model_info_custom_llm_with_same_name_vllm():
         pytest.fail("Expected get model info to fail for an unmapped model/provider")
     except Exception:
         pass
+
+
+def test_get_model_info_shows_correct_supports_vision():
+    info = litellm.get_model_info("gemini/gemini-1.5-flash")
+    print("info", info)
+    assert info["supports_vision"] is True
diff --git a/litellm/types/utils.py b/litellm/types/utils.py
index 3ae9fb056..3ecf36ba2 100644
--- a/litellm/types/utils.py
+++ b/litellm/types/utils.py
@@ -73,6 +73,7 @@ class ModelInfo(TypedDict, total=False):
     supported_openai_params: Required[Optional[List[str]]]
     supports_system_messages: Optional[bool]
     supports_response_schema: Optional[bool]
+    supports_vision: Optional[bool]
 
 
 class GenericStreamingChunk(TypedDict):
diff --git a/litellm/utils.py b/litellm/utils.py
index 56331c6ce..d32800764 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -4829,6 +4829,7 @@ def get_model_info(model: str, custom_llm_provider: Optional[str] = None) -> Mod
             supports_response_schema=_model_info.get(
                 "supports_response_schema", None
             ),
+            supports_vision=_model_info.get("supports_vision", None),
         )
     except Exception:
         raise Exception(
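
A minimal usage sketch of the new field (not part of the patch): after this change, callers can read `supports_vision` from the dict returned by `litellm.get_model_info()`, as the new unit test does. This assumes the model is present in litellm's model cost map; the `describe_model` helper name below is hypothetical.

import litellm

def describe_model(model: str) -> None:
    # get_model_info() now includes 'supports_vision' in the ModelInfo it returns;
    # the value may be None for models whose map entry omits the field.
    info = litellm.get_model_info(model)
    if info.get("supports_vision"):
        print(f"{model} accepts image inputs")
    else:
        print(f"{model} is text-only (or vision support is unknown)")

describe_model("gemini/gemini-1.5-flash")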