llama-stack-mirror/tests/integration/recordings/responses/f23defea82ec.json
Matthew Farrellee 3943594fe6 chore: use ollama/all-minilm:l6-v2 for ollama tests
recordings:
- ./scripts/integration-tests.sh --stack-config server:ci-tests --suite base --setup ollama --inference-mode record
- ./scripts/integration-tests.sh --stack-config server:ci-tests --suite vision --setup ollama-vision --inference-mode record
2025-09-24 07:30:38 -04:00

{
  "request": {
    "method": "POST",
    "url": "http://0.0.0.0:11434/v1/v1/embeddings",
    "headers": {},
    "body": {
      "model": "all-minilm:l6-v2",
      "input": "Test dimensions parameter",
      "encoding_format": "float",
      "dimensions": 16
    },
    "endpoint": "/v1/embeddings",
    "model": "all-minilm:l6-v2"
  },
  "response": {
    "body": {
      "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
      "__data__": {
        "data": [
          {
            "embedding": [
              0.253706,
              0.016367152,
              -0.29664654,
              0.31654558,
              -0.18624601,
              0.07602756,
              -0.031531323,
              0.2986085,
              -0.49672848,
              -0.36617878,
              0.25328273,
              -0.33349335,
              0.0060151755,
              0.14081024,
              -0.13757885,
              -0.14679416
            ],
            "index": 0,
            "object": "embedding"
          }
        ],
        "model": "all-minilm:l6-v2",
        "object": "list",
        "usage": {
          "prompt_tokens": 3,
          "total_tokens": 3
        }
      }
    },
    "is_streaming": false
  }
}
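
For context, the recording above captures an OpenAI-compatible embeddings call to Ollama with the dimensions parameter set to 16, and the stored response contains a 16-value embedding. The sketch below shows how an equivalent request might be issued with the openai Python client; the base URL, the placeholder API key, and the choice to call Ollama directly (rather than through the llama-stack server used by the recording scripts) are illustrative assumptions, not part of the recording itself.

# Minimal sketch (assumptions: the `openai` Python package is installed and an
# Ollama server with the all-minilm:l6-v2 model is listening on 0.0.0.0:11434).
from openai import OpenAI

# Ollama exposes an OpenAI-compatible API under /v1; the API key is unused but required by the client.
client = OpenAI(base_url="http://0.0.0.0:11434/v1", api_key="not-needed")

resp = client.embeddings.create(
    model="all-minilm:l6-v2",
    input="Test dimensions parameter",
    encoding_format="float",
    dimensions=16,  # request a 16-dimensional embedding, as in the recorded request above
)

print(len(resp.data[0].embedding))  # expected to print 16 if the server honors `dimensions`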