From 2ec323ece1c988ad9df3482c8aead00c4b6963b9 Mon Sep 17 00:00:00 2001
From: Derek Higgins
Date: Wed, 17 Sep 2025 10:09:05 +0100
Subject: [PATCH] fix: Update inference recorder to handle both Ollama and
 OpenAI model

o Handle Ollama format where models are nested under response['body']['models']
o Fall back to OpenAI format where models are directly in response['body']

Closes: #3457

Signed-off-by: Derek Higgins
---
 llama_stack/testing/inference_recorder.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/llama_stack/testing/inference_recorder.py b/llama_stack/testing/inference_recorder.py
index 674016fb1..1071da13f 100644
--- a/llama_stack/testing/inference_recorder.py
+++ b/llama_stack/testing/inference_recorder.py
@@ -203,7 +203,12 @@ def _model_identifiers_digest(endpoint: str, response: dict[str, Any]) -> str:
     - '/v1/models' (OpenAI): response body is: [ { id: ... }, ... ]

     Returns a list of unique identifiers or None if structure doesn't match.
     """
-    items = response["body"]
+    if "models" in response["body"]:
+        # ollama
+        items = response["body"]["models"]
+    else:
+        # openai
+        items = response["body"]
     idents = [m.model if endpoint == "/api/tags" else m.id for m in items]
     return sorted(set(idents))