Support discovering gemini, anthropic, xai models by calling their /v1/models endpoint (#9530)

* fix: initial commit for adding provider model discovery to gemini

* feat(gemini/): add model discovery for gemini/ route

* docs(set_keys.md): update docs to show you can check available gemini models as well

* feat(anthropic/): add model discovery for anthropic api key

* feat(xai/): add model discovery for XAI

enables checking what models an xai key can call

* ci: bump ci config yml

* fix(topaz/common_utils.py): fix linting error

* fix: fix linting error for python38
This commit is contained in:
Krish Dholakia 2025-03-27 22:50:48 -07:00 committed by GitHub
parent 340a63bace
commit ccbac691e5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 223 additions and 6 deletions

View file

@ -303,6 +303,24 @@ def test_aget_valid_models():
os.environ = old_environ
@pytest.mark.parametrize("custom_llm_provider", ["gemini", "anthropic", "xai"])
def test_get_valid_models_with_custom_llm_provider(custom_llm_provider):
    """Provider model discovery must be consistent for a given provider.

    For each provider that supports model discovery, the model list exposed
    by its provider config (``get_models``) must equal the list returned by
    ``get_valid_models`` when querying the provider endpoint directly.
    """
    from litellm.types.utils import LlmProviders
    from litellm.utils import ProviderConfigManager

    # A provider config must exist for every parametrized provider.
    provider_config = ProviderConfigManager.get_provider_model_info(
        model=None,
        provider=LlmProviders(custom_llm_provider),
    )
    assert provider_config is not None

    # Discovery via the provider endpoint should yield a non-empty list...
    discovered_models = get_valid_models(
        check_provider_endpoint=True, custom_llm_provider=custom_llm_provider
    )
    print(discovered_models)
    assert len(discovered_models) > 0

    # ...and it should agree exactly with the provider config's own list.
    assert provider_config.get_models() == discovered_models
# test_get_valid_models()