This commit is contained in:
Matthew Farrellee 2025-07-26 10:54:56 +00:00 committed by GitHub
commit 3a7416cc75
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@ -73,6 +73,15 @@ class LiteLLMOpenAIMixin(
provider_data_api_key_field: str,
openai_compat_api_base: str | None = None,
):
"""
Initialize the LiteLLMOpenAIMixin.
:param model_entries: The model entries to register.
:param api_key_from_config: The API key to use from the config.
:param provider_data_api_key_field: The field in the provider data that contains the API key.
:param litellm_provider_name: The name of the provider, used for model lookups.
:param openai_compat_api_base: The base URL for OpenAI compatibility, or None if not using OpenAI compatibility.
"""
ModelRegistryHelper.__init__(self, model_entries)
self.litellm_provider_name = litellm_provider_name
@ -85,6 +94,8 @@ class LiteLLMOpenAIMixin(
else:
self.is_openai_compat = False
self.litellm_provider_name = litellm_provider_name
async def initialize(self):
pass
@ -428,3 +439,17 @@ class LiteLLMOpenAIMixin(
logprobs: LogProbConfig | None = None,
):
raise NotImplementedError("Batch chat completion is not supported for OpenAI Compat")
async def check_model_availability(self, model: str) -> bool:
    """
    Report whether *model* can be served dynamically through LiteLLM for
    the provider configured on this mixin.

    :param model: The model identifier to check.
    :return: True if the model is available dynamically, False otherwise.
    """
    registry = litellm.models_by_provider
    provider = self.litellm_provider_name
    if provider in registry:
        # Provider is known to litellm; membership in its model list decides.
        return model in registry[provider]
    # Unknown provider: surface the misconfiguration and report unavailable.
    logger.error(f"Provider {self.litellm_provider_name} is not registered in litellm.")
    return False