mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 04:04:14 +00:00
fix: Update inference recorder to handle both Ollama and OpenAI model formats
- Handle Ollama format where models are nested under response['body']['models']
- Fall back to OpenAI format where models are directly in response['body']

Closes: #3457
Signed-off-by: Derek Higgins <derekh@redhat.com>
This commit is contained in:
parent
9acf49753e
commit
2ec323ece1
1 changed file with 6 additions and 1 deletion
|
@@ -203,7 +203,12 @@ def _model_identifiers_digest(endpoint: str, response: dict[str, Any]) -> str:
|
|||
- '/v1/models' (OpenAI): response body is: [ { id: ... }, ... ]
|
||||
Returns a list of unique identifiers or None if structure doesn't match.
|
||||
"""
|
||||
items = response["body"]
|
||||
if "models" in response["body"]:
|
||||
# ollama
|
||||
items = response["body"]["models"]
|
||||
else:
|
||||
# openai
|
||||
items = response["body"]
|
||||
idents = [m.model if endpoint == "/api/tags" else m.id for m in items]
|
||||
return sorted(set(idents))
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue