test: add telemetry tests for openai

Ensure the OpenAI completion routes produce metrics in the existing telemetry suite.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
Charlie Doern 2025-07-21 10:08:00 -04:00
parent e2aefd797f
commit e461246eed
22 changed files with 1224 additions and 27 deletions

@@ -423,7 +423,7 @@ class InferenceRouter(Inference):
             prompt_logprobs=prompt_logprobs,
             suffix=suffix,
         )
-        provider = self.routing_table.get_provider_impl(model_obj.identifier)
+        provider = await self.routing_table.get_provider_impl(model_obj.identifier)
         if stream:
             return await provider.openai_completion(**params)
         # TODO: Metrics do NOT work with openai_completion stream=True due to the fact
@@ -523,7 +523,7 @@ class InferenceRouter(Inference):
             top_p=top_p,
             user=user,
         )
-        provider = self.routing_table.get_provider_impl(model_obj.identifier)
+        provider = await self.routing_table.get_provider_impl(model_obj.identifier)
         if stream:
             response_stream = await provider.openai_chat_completion(**params)