fix: Update inference recorder to handle both Ollama and OpenAI model list formats

- Handle Ollama format where models are nested under response['body']['models']
- Fall back to OpenAI format where models are directly in response['body']

Closes: #3457
Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
Derek Higgins 2025-09-17 10:09:05 +01:00
parent 9acf49753e
commit 2ec323ece1

View file

@@ -203,7 +203,12 @@ def _model_identifiers_digest(endpoint: str, response: dict[str, Any]) -> str:
     - '/v1/models' (OpenAI): response body is: [ { id: ... }, ... ]
     Returns a list of unique identifiers or None if structure doesn't match.
     """
-    items = response["body"]
+    if "models" in response["body"]:
+        # ollama
+        items = response["body"]["models"]
+    else:
+        # openai
+        items = response["body"]
     idents = [m.model if endpoint == "/api/tags" else m.id for m in items]
     return sorted(set(idents))