feat: create dynamic model registration for OpenAI and Llama OpenAI-compatible remote inference providers

fix: remove register_model() implementation from LiteLLMOpenAIMixin, add a log message to Llama's query_available_models(), and add the llama-api-client dependency to pyproject.toml
This commit is contained in:
r3v5 2025-07-14 12:39:15 +01:00
parent f85189022c
commit fa5935bd80
No known key found for this signature in database
GPG key ID: 7758B9F272DE67D9
5 changed files with 49 additions and 14 deletions

View file

@ -60,6 +60,17 @@ class OpenAIInferenceAdapter(LiteLLMOpenAIMixin):
# litellm specific model names, an abstraction leak.
self.is_openai_compat = True
async def query_available_models(self) -> list[str]:
    """Query the OpenAI API for the models it currently serves.

    Returns:
        A list of model IDs reported by the API's ``models.list`` endpoint,
        or an empty list if the request fails. Discovery is best-effort:
        failures are logged as warnings, never raised to the caller.
    """
    try:
        openai_client = self._get_openai_client()
        available_models = await openai_client.models.list()
        # Lazy %-style args so the (potentially large) model list is only
        # rendered when this log level is actually enabled.
        logger.info("Available models from OpenAI: %s", available_models.data)
        return [model.id for model in available_models.data]
    except Exception as e:
        # Best-effort: model discovery is optional, so degrade gracefully
        # instead of propagating network/auth errors to the caller.
        logger.warning("Failed to query available models from OpenAI: %s", e)
        return []
async def initialize(self) -> None:
    # Delegates setup entirely to the parent class (LiteLLMOpenAIMixin per
    # the hunk header); no adapter-specific initialization is visible here.
    # NOTE(review): body may continue beyond this diff hunk — confirm.
    await super().initialize()