fix(api): update embeddings signature so inputs and outputs list align (#1161)

See Issue #922 

The change is slightly backwards incompatible, but no callsite (in our
client codebases or stack-apps) ever passes a depth-2
`List[List[InterleavedContentItem]]` (which is now disallowed).

## Test Plan

```bash
$ cd llama_stack/providers/tests/inference
$ pytest -s -v -k fireworks test_embeddings.py \
   --inference-model nomic-ai/nomic-embed-text-v1.5 --env EMBEDDING_DIMENSION=784
$  pytest -s -v -k together test_embeddings.py \
   --inference-model togethercomputer/m2-bert-80M-8k-retrieval --env EMBEDDING_DIMENSION=784
$ pytest -s -v -k ollama test_embeddings.py \
   --inference-model all-minilm:latest --env EMBEDDING_DIMENSION=784
```

Also ran `tests/client-sdk/inference/test_embeddings.py`
This commit is contained in:
Ashwin Bharambe 2025-02-20 21:43:13 -08:00 committed by GitHub
parent cfa752fc92
commit 6f9d622340
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 85 additions and 41 deletions

View file

@ -9,7 +9,7 @@ from typing import List
from llama_stack.apis.inference import (
EmbeddingsResponse,
InterleavedContent,
InterleavedContentItem,
ModelStore,
)
@ -25,7 +25,7 @@ class SentenceTransformerEmbeddingMixin:
async def embeddings(
self,
model_id: str,
contents: List[InterleavedContent],
contents: List[str] | List[InterleavedContentItem],
) -> EmbeddingsResponse:
model = await self.model_store.get_model(model_id)
embedding_model = self._load_sentence_transformer_model(model.provider_resource_id)