mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-08-15 06:00:48 +00:00
feat(vector-io): implement global default embedding model configuration (Issue #2729)
- Add VectorStoreConfig with global default_embedding_model and default_embedding_dimension - Support environment variables LLAMA_STACK_DEFAULT_EMBEDDING_MODEL and LLAMA_STACK_DEFAULT_EMBEDDING_DIMENSION - Implement precedence: explicit model > global default > clear error (no fallback) - Update VectorIORouter with _resolve_embedding_model() precedence logic - Remove non-deterministic 'first model in run.yaml' fallback behavior - Add vector_store_config to StackRunConfig and all distribution templates - Include comprehensive unit tests for config loading and router precedence - Update documentation with configuration examples and usage patterns - Fix error messages to include 'Failed to' prefix per coding standards Resolves deterministic vector store creation by eliminating unpredictable fallbacks and providing clear configuration options at the stack level.
This commit is contained in:
parent
2af45a240e
commit
0eff77c73d
1 changed files with 13 additions and 0 deletions
|
@ -63,6 +63,19 @@ def pytest_configure(config):
|
|||
os.environ["DISABLE_CODE_SANDBOX"] = "1"
|
||||
logger.info("Setting DISABLE_CODE_SANDBOX=1 for macOS")
|
||||
|
||||
# After processing CLI --env overrides, ensure global default embedding model is set for vector-store operations
|
||||
embedding_model_opt = config.getoption("--embedding-model") or "sentence-transformers/all-MiniLM-L6-v2"
|
||||
if embedding_model_opt and not os.getenv("LLAMA_STACK_DEFAULT_EMBEDDING_MODEL"):
|
||||
# Use first value in comma-separated list (if any)
|
||||
default_model = embedding_model_opt.split(",")[0].strip()
|
||||
os.environ["LLAMA_STACK_DEFAULT_EMBEDDING_MODEL"] = default_model
|
||||
logger.info(f"Setting LLAMA_STACK_DEFAULT_EMBEDDING_MODEL={default_model}")
|
||||
|
||||
embedding_dim_opt = config.getoption("--embedding-dimension") or 384
|
||||
if not os.getenv("LLAMA_STACK_DEFAULT_EMBEDDING_DIMENSION") and embedding_dim_opt:
|
||||
os.environ["LLAMA_STACK_DEFAULT_EMBEDDING_DIMENSION"] = str(embedding_dim_opt)
|
||||
logger.info(f"Setting LLAMA_STACK_DEFAULT_EMBEDDING_DIMENSION={embedding_dim_opt}")
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue