Mirror of https://github.com/meta-llama/llama-stack.git
Since we are pushing for HF repos, we should accept them in inference configs
Parent: 00816cc8ef
Commit: 0d4565349b
5 changed files with 14 additions and 8 deletions
@@ -22,9 +22,9 @@ def is_supported_safety_model(model: Model) -> bool:
     ]


-def supported_inference_models() -> List[str]:
+def supported_inference_models() -> List[Model]:
     return [
-        m.descriptor()
+        m
         for m in all_registered_models()
         if (
             m.model_family in {ModelFamily.llama3_1, ModelFamily.llama3_2}
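The hunk above changes supported_inference_models() to return full Model objects instead of descriptor strings, which keeps the Hugging Face repo name available to callers that validate inference configs. Below is a minimal sketch of how such a config check could use the new return type. The import paths, the huggingface_repo attribute, and the resolve_descriptor helper are assumptions for illustration and are not taken from this commit; only descriptor(), all_registered_models(), and ModelFamily appear in the diff itself.

    from typing import List, Optional

    # Assumed import paths from the llama-models package; adjust if the
    # actual project layout differs.
    from llama_models.datatypes import Model, ModelFamily
    from llama_models.sku_list import all_registered_models


    def supported_inference_models() -> List[Model]:
        # Mirrors the post-change function in the diff: return Model objects,
        # not descriptor strings, so HF repo names stay available to callers.
        return [
            m
            for m in all_registered_models()
            if m.model_family in {ModelFamily.llama3_1, ModelFamily.llama3_2}
        ]


    def resolve_descriptor(configured_model: str) -> Optional[str]:
        # Hypothetical helper (not part of this commit): accept either the
        # core descriptor (e.g. "Llama3.1-8B-Instruct") or the Hugging Face
        # repo (e.g. "meta-llama/Llama-3.1-8B-Instruct") in a config, and
        # normalize to the internal descriptor.
        for m in supported_inference_models():
            if configured_model in (m.descriptor(), m.huggingface_repo):
                return m.descriptor()
        return None

With this shape, a provider's config validation can accept an HF repo id such as "meta-llama/Llama-3.1-8B-Instruct" and still resolve it to the internal descriptor used elsewhere in the stack.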