mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-03 09:53:45 +00:00
fix: allowed_models config did not filter models (#4030)
# What does this PR do?
closes #4022
## Test Plan
CI with new tests
Co-authored-by: Ashwin Bharambe <ashwin.bharambe@gmail.com>
(cherry picked from commit 1263448de2)
This commit is contained in:
parent
49a290e53e
commit
cb4b677552
3 changed files with 19 additions and 12 deletions
|
|
@ -82,9 +82,6 @@ class OpenAIMixin(NeedsRequestProviderData, ABC, BaseModel):
|
|||
# This is set in list_models() and used in check_model_availability()
|
||||
_model_cache: dict[str, Model] = {}
|
||||
|
||||
# List of allowed models for this provider, if empty all models allowed
|
||||
allowed_models: list[str] = []
|
||||
|
||||
# Optional field name in provider data to look for API key, which takes precedence
|
||||
provider_data_api_key_field: str | None = None
|
||||
|
||||
|
|
@ -416,7 +413,7 @@ class OpenAIMixin(NeedsRequestProviderData, ABC, BaseModel):
|
|||
for provider_model_id in provider_models_ids:
|
||||
if not isinstance(provider_model_id, str):
|
||||
raise ValueError(f"Model ID {provider_model_id} from list_provider_model_ids() is not a string")
|
||||
if self.allowed_models and provider_model_id not in self.allowed_models:
|
||||
if self.config.allowed_models is not None and provider_model_id not in self.config.allowed_models:
|
||||
logger.info(f"Skipping model {provider_model_id} as it is not in the allowed models list")
|
||||
continue
|
||||
if metadata := self.embedding_model_metadata.get(provider_model_id):
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue