feat: expand set of known openai models, allow using openai canonical model names

note: the openai provider currently exposes the litellm-specific model names to the user. this change is compatible with that behavior; the litellm names should eventually be deprecated.
Matthew Farrellee 2025-05-14 11:36:20 -04:00
parent a1fbfb51e2
commit e42f374a11
5 changed files with 222 additions and 9 deletions

@@ -62,6 +62,9 @@ class LiteLLMOpenAIMixin(
     Inference,
     NeedsRequestProviderData,
 ):
+    # TODO: avoid exposing the litellm specific model names to the user.
+    # potential change: add a prefix param that gets added to the model name
+    # when calling litellm.
     def __init__(
         self,
         model_entries,
@@ -92,7 +95,9 @@ class LiteLLMOpenAIMixin(
         return model
 
     def get_litellm_model_name(self, model_id: str) -> str:
-        return "openai/" + model_id if self.is_openai_compat else model_id
+        # users may be using openai/ prefix in their model names. the openai/models.py did this by default.
+        # model_id.startswith("openai/") is for backwards compatibility.
+        return "openai/" + model_id if self.is_openai_compat and not model_id.startswith("openai/") else model_id
 
     async def completion(
         self,
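
Below is a minimal, self-contained sketch of the prefixing behavior this change introduces. It is not the actual LiteLLMOpenAIMixin class; it assumes a standalone function with an is_openai_compat flag standing in for the mixin's attribute, and "gpt-4o" is used purely as an example model id.

def get_litellm_model_name(model_id: str, is_openai_compat: bool) -> str:
    # Add the "openai/" prefix only when targeting an OpenAI-compatible
    # endpoint and the caller has not already supplied the prefix.
    if is_openai_compat and not model_id.startswith("openai/"):
        return "openai/" + model_id
    return model_id

# Expected mappings under this logic:
assert get_litellm_model_name("gpt-4o", is_openai_compat=True) == "openai/gpt-4o"
assert get_litellm_model_name("openai/gpt-4o", is_openai_compat=True) == "openai/gpt-4o"  # backwards compat
assert get_litellm_model_name("gpt-4o", is_openai_compat=False) == "gpt-4o"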