diff --git a/llama_stack/providers/adapters/inference/tgi/tgi.py b/llama_stack/providers/adapters/inference/tgi/tgi.py
index bb0b0ca6a..6c3b38347 100644
--- a/llama_stack/providers/adapters/inference/tgi/tgi.py
+++ b/llama_stack/providers/adapters/inference/tgi/tgi.py
@@ -18,12 +18,6 @@ from llama_stack.providers.utils.inference.prepare_messages import prepare_messages
 from .config import TGIImplConfig
 
-HF_SUPPORTED_MODELS = {
-    "Meta-Llama3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct",
-    "Meta-Llama3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-    "Meta-Llama3.1-405B-Instruct": "meta-llama/Meta-Llama-3.1-405B-Instruct",
-}
-
 
 class TGIAdapter(Inference):
     def __init__(self, config: TGIImplConfig) -> None:
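This hunk drops the hard-coded HF_SUPPORTED_MODELS dict from the TGI adapter. For illustration only (not part of this patch), here is a minimal sketch of how a caller could still map a Llama descriptor to its Hugging Face repo id without such a dict, assuming `llama_models.sku_list.resolve_model` is available and its `Model` schema carries a `huggingface_repo` field; the helper name `hf_repo_for` is hypothetical:

```python
# Hypothetical helper (not part of this diff): resolve a Llama SKU descriptor to its
# Hugging Face repo id via llama_models metadata instead of a hard-coded dict.
# Assumes llama_models.sku_list.resolve_model exists and Model exposes huggingface_repo.
from llama_models.sku_list import resolve_model


def hf_repo_for(descriptor: str) -> str:
    model = resolve_model(descriptor)
    if model is None or model.huggingface_repo is None:
        raise ValueError(f"no Hugging Face repo known for model '{descriptor}'")
    return model.huggingface_repo


if __name__ == "__main__":
    # e.g. "Meta-Llama3.1-8B-Instruct" -> "meta-llama/Meta-Llama-3.1-8B-Instruct"
    print(hf_repo_for("Meta-Llama3.1-8B-Instruct"))
```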