Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-23 16:37:28 +00:00)
fix: remove consistency checks (#3881)
# What does this PR do?

The embedding config in `metadata` was conflicting with the default embedding model set on the server side via `extra_body`. This removes the consistency check and simply lets `metadata` take precedence over `extra_body`. Previously the conflict failed with:

`ValueError: Embedding model inconsistent between metadata ('text-embedding-3-small') and extra_body ('sentence-transformers/nomic-ai/nomic-embed-text-v1.5')`

## Test Plan

CI
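For illustration, here is a minimal standalone sketch of the precedence behavior after this change. The helper name `resolve_embedding_config`, the dict-based signature, and the `EMBEDDING_DIMENSION` value are assumptions for the example, not the actual mixin code: the point is only that `metadata` now wins and `extra_body` is used as a fallback, so the conflict above no longer raises.

```python
# Hypothetical sketch of the post-change resolution order (not the real mixin code):
# metadata takes precedence, extra_body is only a fallback, and no ValueError is
# raised when the two disagree.

EMBEDDING_DIMENSION = 384  # assumed default; the real constant lives in llama-stack


def resolve_embedding_config(metadata: dict, extra_body: dict) -> tuple[str | None, int]:
    """Pick the embedding model/dimension, letting metadata take precedence."""
    if metadata.get("embedding_model"):
        # Metadata present: use it and ignore any conflicting extra_body values.
        return metadata["embedding_model"], metadata.get("embedding_dimension", EMBEDDING_DIMENSION)
    # No metadata config: fall back to the server-side default passed via extra_body.
    return extra_body.get("embedding_model"), extra_body.get("embedding_dimension", EMBEDDING_DIMENSION)


# The scenario from the old error message now resolves silently to the metadata model:
model, dim = resolve_embedding_config(
    metadata={"embedding_model": "text-embedding-3-small"},
    extra_body={"embedding_model": "sentence-transformers/nomic-ai/nomic-embed-text-v1.5"},
)
assert model == "text-embedding-3-small"
```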
This commit is contained in: parent 4c718523fa, commit eb2b240594.

2 changed files with 0 additions and 54 deletions
```diff
@@ -370,16 +370,6 @@ class OpenAIVectorStoreMixin(ABC):
             logger.debug(
                 f"Using embedding config from metadata (takes precedence over extra_body): model='{embedding_model}', dimension={embedding_dimension}"
             )
-
-            # Check for conflicts with extra_body
-            if extra_body.get("embedding_model") and extra_body["embedding_model"] != embedding_model:
-                raise ValueError(
-                    f"Embedding model inconsistent between metadata ('{embedding_model}') and extra_body ('{extra_body['embedding_model']}')"
-                )
-            if extra_body.get("embedding_dimension") and extra_body["embedding_dimension"] != embedding_dimension:
-                raise ValueError(
-                    f"Embedding dimension inconsistent between metadata ({embedding_dimension}) and extra_body ({extra_body['embedding_dimension']})"
-                )
         else:
             embedding_model = extra_body.get("embedding_model")
             embedding_dimension = extra_body.get("embedding_dimension", EMBEDDING_DIMENSION)
```