fix: openai provider model id (#2229)

# What does this PR do?
Since https://github.com/meta-llama/llama-stack/pull/2193 switched to the
OpenAI SDK, we need to strip the 'openai/' prefix from the model_id before
passing it to the API.


## Test Plan
Start the server with the openai provider and send a chat completion request.
This commit is contained in:
ehhuang 2025-05-22 14:51:01 -07:00 committed by GitHub
parent 549812f51e
commit 8feb1827c8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@ -92,8 +92,11 @@ class OpenAIInferenceAdapter(LiteLLMOpenAIMixin):
if prompt_logprobs is not None:
logging.warning("prompt_logprobs is not supported by the OpenAI API. Ignoring.")
model_id = (await self.model_store.get_model(model)).provider_resource_id
if model_id.startswith("openai/"):
model_id = model_id[len("openai/") :]
params = await prepare_openai_completion_params(
model=(await self.model_store.get_model(model)).provider_resource_id,
model=model_id,
prompt=prompt,
best_of=best_of,
echo=echo,
@ -139,8 +142,11 @@ class OpenAIInferenceAdapter(LiteLLMOpenAIMixin):
top_p: float | None = None,
user: str | None = None,
) -> OpenAIChatCompletion | AsyncIterator[OpenAIChatCompletionChunk]:
model_id = (await self.model_store.get_model(model)).provider_resource_id
if model_id.startswith("openai/"):
model_id = model_id[len("openai/") :]
params = await prepare_openai_completion_params(
model=(await self.model_store.get_model(model)).provider_resource_id,
model=model_id,
messages=messages,
frequency_penalty=frequency_penalty,
function_call=function_call,