feat: expand set of known openai models, allow using openai canonical model names

note: the OpenAI provider currently exposes the LiteLLM-specific model names (e.g. "openai/gpt-4") to the user. This change is compatible with that; the LiteLLM-specific names should be deprecated in favor of the canonical OpenAI names.
Matthew Farrellee 2025-05-14 11:36:20 -04:00
parent a1fbfb51e2
commit e42f374a11
5 changed files with 222 additions and 9 deletions


@@ -19,6 +19,13 @@ class OpenAIInferenceAdapter(LiteLLMOpenAIMixin):
             provider_data_api_key_field="openai_api_key",
         )
         self.config = config
+        # we set is_openai_compat so users can use the canonical
+        # openai model names like "gpt-4" or "gpt-3.5-turbo"
+        # and the model name will be translated to litellm's
+        # "openai/gpt-4" or "openai/gpt-3.5-turbo" transparently.
+        # if we do not set this, users will be exposed to the
+        # litellm specific model names, an abstraction leak.
+        self.is_openai_compat = True
 
     async def initialize(self) -> None:
         await super().initialize()
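
For context, this is a minimal sketch of the kind of name translation the is_openai_compat flag enables; the resolve_model_name helper below is illustrative only and is not the actual LiteLLMOpenAIMixin code. The idea, per the comment in the diff, is that a canonical OpenAI name like "gpt-4" is mapped transparently to LiteLLM's provider-prefixed "openai/gpt-4".

def resolve_model_name(model: str, is_openai_compat: bool) -> str:
    # Illustrative sketch: map a canonical OpenAI model name such as "gpt-4"
    # to LiteLLM's provider-prefixed form "openai/gpt-4" when compat mode is on.
    if is_openai_compat and not model.startswith("openai/"):
        return f"openai/{model}"
    return model

# Both spellings resolve to the same LiteLLM name, so existing callers that
# already pass "openai/gpt-4" keep working while plain "gpt-4" now works too.
assert resolve_model_name("gpt-4", True) == "openai/gpt-4"
assert resolve_model_name("openai/gpt-4", True) == "openai/gpt-4"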