Litellm dev 01 06 2025 p2 (#7597)

* test(test_amazing_vertex_completion.py): fix test

* test: initial working code gecko test

* fix(vertex_ai_non_gemini.py): support vertex ai code gecko fake streaming

Fixes https://github.com/BerriAI/litellm/issues/7360
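
For context: code-gecko does not support server-side streaming, so the fix simulates ("fakes") streaming by making one blocking completion call and re-emitting the finished response in chunks. A minimal sketch of that pattern — the fake_stream helper below is illustrative only, not litellm's actual implementation:

from typing import Iterator


def fake_stream(full_text: str, chunk_size: int = 20) -> Iterator[str]:
    # Re-emit an already-complete response in small pieces so callers
    # can iterate over it as if the provider had streamed it.
    for i in range(0, len(full_text), chunk_size):
        yield full_text[i : i + chunk_size]


# Usage: full_text would come from a single blocking code-gecko call.
for piece in fake_stream("def hello():\n    print('hi')\n"):
    print(piece, end="")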

* test(test_get_model_info.py): add test for getting custom provider model info

Covers https://github.com/BerriAI/litellm/issues/7575
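
A rough sketch of the scenario that test covers — registering a model under a custom provider and then resolving its info. The provider name, model name, and cost values below are hypothetical, and this assumes litellm.register_model accepts entries following the litellm.model_cost schema:

import litellm

# Hypothetical custom-provider model entry; keys mirror the schema of
# entries in litellm.model_cost.
litellm.register_model(
    {
        "my-custom-provider/my-model": {
            "max_tokens": 4096,
            "input_cost_per_token": 0.0,
            "output_cost_per_token": 0.0,
            "litellm_provider": "my-custom-provider",
            "mode": "chat",
        }
    }
)

info = litellm.get_model_info(model="my-custom-provider/my-model")
assert info["max_tokens"] == 4096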

* fix(utils.py): fix get_provider_model_info check

Handle custom llm provider scenario

Fixes https://github.com/BerriAI/litellm/issues/7575

litellm/utils.py

@@ -4224,6 +4224,7 @@ def _get_model_info_helper(  # noqa: PLR0915
         _model_info: Optional[Dict[str, Any]] = None
         key: Optional[str] = None
+        provider_config: Optional[BaseLLMModelInfo] = None
         if combined_model_name in litellm.model_cost:
             key = combined_model_name
             _model_info = _get_model_info_from_model_cost(key=key)

@@ -4263,7 +4264,10 @@
         ):
             _model_info = None
-        if custom_llm_provider:
+        if custom_llm_provider and custom_llm_provider in [
+            provider.value for provider in LlmProviders
+        ]:
+            # Check if the provider string exists in LlmProviders enum
             provider_config = ProviderConfigManager.get_provider_model_info(
                 model=model, provider=LlmProviders(custom_llm_provider)
             )
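
The membership check in the second hunk matters because constructing the enum directly raises for unknown provider strings, which is exactly the custom-provider scenario from #7575. A self-contained illustration with a trimmed stand-in enum — the "my-custom-llm" value is hypothetical:

from enum import Enum


class LlmProviders(str, Enum):
    # Trimmed stand-in for litellm's real provider enum.
    OPENAI = "openai"
    VERTEX_AI = "vertex_ai"


custom_llm_provider = "my-custom-llm"  # a provider not in the enum

# Without the membership check, LlmProviders("my-custom-llm") raises:
#   ValueError: 'my-custom-llm' is not a valid LlmProviders
if custom_llm_provider in [p.value for p in LlmProviders]:
    provider = LlmProviders(custom_llm_provider)
else:
    provider = None  # custom providers fall through instead of crashing

print(provider)  # -> None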