{ "test_id": "tests/integration/inference/test_openai_embeddings.py::test_openai_embeddings_with_dimensions[llama_stack_client-emb=ollama/all-minilm:l6-v2]", "request": { "method": "POST", "url": "http://0.0.0.0:11434/v1/v1/embeddings", "headers": {}, "body": { "model": "all-minilm:l6-v2", "input": "Test dimensions parameter", "encoding_format": "float", "dimensions": 16 }, "endpoint": "/v1/embeddings", "model": "all-minilm:l6-v2" }, "response": { "body": { "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse", "__data__": { "data": [ { "embedding": [ 0.25369987, 0.016355688, -0.29676768, 0.316427, -0.18642858, 0.076206245, -0.031503417, 0.29860005, -0.496603, -0.36621967, 0.25334543, -0.333392, 0.005993569, 0.14079759, -0.13775977, -0.14680246 ], "index": 0, "object": "embedding" } ], "model": "all-minilm:l6-v2", "object": "list", "usage": { "prompt_tokens": 3, "total_tokens": 3 } } }, "is_streaming": false } }