Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 07:14:20 +00:00)
chore(api): add mypy coverage to meta_reference_config
Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
Parent: daf660c4ea · Commit: 22a6c3d9dc
2 changed files with 1 addition and 2 deletions
llama_stack/providers/inline/inference/meta_reference/config.py

@@ -39,7 +39,7 @@ class MetaReferenceInferenceConfig(BaseModel):
     def validate_model(cls, model: str) -> str:
         permitted_models = supported_inference_models()
         descriptors = [m.descriptor() for m in permitted_models]
-        repos = [m.huggingface_repo for m in permitted_models]
+        repos = [m.huggingface_repo for m in permitted_models if m.huggingface_repo is not None]
         if model not in (descriptors + repos):
             model_list = "\n\t".join(repos)
             raise ValueError(f"Unknown model: `{model}`. Choose from [\n\t{model_list}\n]")
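For context on the change above: huggingface_repo can be None for some supported models, so the unfiltered comprehension produces a list of optional strings and the later "\n\t".join(repos) call does not type-check once mypy covers this file. Filtering on `is not None` narrows the element type to str. Below is a minimal, self-contained sketch of the same pattern; ModelEntry and the sample entries are hypothetical stand-ins for the real types returned by supported_inference_models().

# Sketch only: illustrates why the None filter satisfies mypy.
from dataclasses import dataclass
from typing import Optional


@dataclass
class ModelEntry:
    # Hypothetical stand-in for the real model type; huggingface_repo
    # is optional there as well.
    descriptor_value: str
    huggingface_repo: Optional[str]

    def descriptor(self) -> str:
        return self.descriptor_value


def permitted_repos(models: list[ModelEntry]) -> list[str]:
    # The `is not None` guard narrows each element from Optional[str] to str,
    # so the result is list[str] and the join below type-checks cleanly.
    return [m.huggingface_repo for m in models if m.huggingface_repo is not None]


entries = [
    ModelEntry("example-8b-instruct", "example-org/example-8b-instruct"),
    ModelEntry("example-guard-1b", None),  # no Hugging Face repo published
]
print("\n\t".join(permitted_repos(entries)))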
pyproject.toml

@@ -250,7 +250,6 @@ exclude = [
     "^llama_stack/providers/inline/agents/meta_reference/safety\\.py$",
     "^llama_stack/providers/inline/datasetio/localfs/",
     "^llama_stack/providers/inline/eval/meta_reference/eval\\.py$",
-    "^llama_stack/providers/inline/inference/meta_reference/config\\.py$",
     "^llama_stack/providers/inline/inference/meta_reference/inference\\.py$",
     "^llama_stack/models/llama/llama3/generation\\.py$",
     "^llama_stack/models/llama/llama3/multimodal/model\\.py$",
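With the exclude entry removed, the config module is now part of the project's mypy run. As a quick local sanity check (a sketch assuming mypy is installed in the development environment; the project's CI may invoke it differently, for example through pre-commit), the file can be checked directly:

mypy llama_stack/providers/inline/inference/meta_reference/config.py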