llama-stack-mirror/tests/integration/recordings/responses/d0ac68cbde69.json
{
  "request": {
    "method": "POST",
    "url": "http://localhost:11434/api/ps",
    "headers": {},
    "body": {},
    "endpoint": "/api/ps",
    "model": ""
  },
  "response": {
    "body": {
      "__type__": "ollama._types.ProcessResponse",
      "__data__": {
        "models": [
          {
            "model": "llama3.2-vision:11b",
            "name": "llama3.2-vision:11b",
            "digest": "6f2f9757ae97e8a3f8ea33d6adb2b11d93d9a35bef277cd2c0b1b5af8e8d0b1e",
            "expires_at": "2025-09-03T11:51:35.966409-07:00",
            "size": 12401209008,
            "size_vram": 12401209008,
            "details": {
              "parent_model": "",
              "format": "gguf",
              "family": "mllama",
              "families": [
                "mllama"
              ],
              "parameter_size": "10.7B",
              "quantization_level": "Q4_K_M"
            }
          }
        ]
      }
    },
    "is_streaming": false
  }
}