mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-22 00:13:08 +00:00
fix: remove consistency checks (#3881)
# What does this PR do? The embedding configuration in `metadata` was conflicting with the default embedding model set on the server side via `extra_body`. This change removes the consistency check and simply lets `metadata` take precedence over `extra_body`. Previously this raised: `ValueError: Embedding model inconsistent between metadata ('text-embedding-3-small') and extra_body ('sentence-transformers/nomic-ai/nomic-embed-text-v1.5')` ## Test Plan CI
This commit is contained in:
parent
4c718523fa
commit
eb2b240594
2 changed files with 0 additions and 54 deletions
|
@ -370,16 +370,6 @@ class OpenAIVectorStoreMixin(ABC):
|
|||
logger.debug(
|
||||
f"Using embedding config from metadata (takes precedence over extra_body): model='{embedding_model}', dimension={embedding_dimension}"
|
||||
)
|
||||
|
||||
# Check for conflicts with extra_body
|
||||
if extra_body.get("embedding_model") and extra_body["embedding_model"] != embedding_model:
|
||||
raise ValueError(
|
||||
f"Embedding model inconsistent between metadata ('{embedding_model}') and extra_body ('{extra_body['embedding_model']}')"
|
||||
)
|
||||
if extra_body.get("embedding_dimension") and extra_body["embedding_dimension"] != embedding_dimension:
|
||||
raise ValueError(
|
||||
f"Embedding dimension inconsistent between metadata ({embedding_dimension}) and extra_body ({extra_body['embedding_dimension']})"
|
||||
)
|
||||
else:
|
||||
embedding_model = extra_body.get("embedding_model")
|
||||
embedding_dimension = extra_body.get("embedding_dimension", EMBEDDING_DIMENSION)
|
||||
|
|
|
@ -1079,50 +1079,6 @@ async def test_embedding_config_consistency_check_passes(vector_io_adapter):
|
|||
assert call_args.embedding_dimension == 768
|
||||
|
||||
|
||||
async def test_embedding_config_inconsistency_errors(vector_io_adapter):
    """Test that inconsistent embedding config between metadata and extra_body raises errors."""
    # Stub out registration so no real vector-store backend is touched.
    vector_io_adapter.register_vector_store = AsyncMock()
    # The adapter needs a provider id attribute for store creation.
    vector_io_adapter.__provider_id__ = "test_provider"

    def _conflicting_request(meta_model, meta_dim, body_model, body_dim):
        # Build a create request whose metadata and extra_body disagree.
        return OpenAICreateVectorStoreRequestWithExtraBody(
            name="test_store",
            metadata={
                "embedding_model": meta_model,
                "embedding_dimension": meta_dim,
            },
            **{
                "embedding_model": body_model,
                "embedding_dimension": body_dim,
            },
        )

    # A model-name mismatch between metadata and extra_body must be rejected.
    request = _conflicting_request("metadata-model", "768", "extra-body-model", 768)
    with pytest.raises(ValueError, match="Embedding model inconsistent between metadata"):
        await vector_io_adapter.openai_create_vector_store(request)

    # Clear recorded calls before exercising the dimension conflict.
    vector_io_adapter.register_vector_store.reset_mock()

    # Same model, but a dimension mismatch must also be rejected.
    request = _conflicting_request("same-model", "512", "same-model", 1024)
    with pytest.raises(ValueError, match="Embedding dimension inconsistent between metadata"):
        await vector_io_adapter.openai_create_vector_store(request)
|
||||
|
||||
|
||||
async def test_embedding_config_defaults_when_missing(vector_io_adapter):
|
||||
"""Test that embedding dimension defaults to 768 when not provided."""
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue