From 8fa49593e04fb4ea0f5ec4ec1120e5431ef65050 Mon Sep 17 00:00:00 2001 From: Hardik Shah Date: Thu, 19 Sep 2024 21:42:15 -0700 Subject: [PATCH] Allow TGI adapter to have non-standard llama model names (#84) Co-authored-by: Hardik Shah --- llama_stack/providers/adapters/inference/tgi/tgi.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/llama_stack/providers/adapters/inference/tgi/tgi.py b/llama_stack/providers/adapters/inference/tgi/tgi.py index bb0b0ca6a..6c3b38347 100644 --- a/llama_stack/providers/adapters/inference/tgi/tgi.py +++ b/llama_stack/providers/adapters/inference/tgi/tgi.py @@ -18,12 +18,6 @@ from llama_stack.providers.utils.inference.prepare_messages import prepare_messa from .config import TGIImplConfig -HF_SUPPORTED_MODELS = { - "Meta-Llama3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct", - "Meta-Llama3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct", - "Meta-Llama3.1-405B-Instruct": "meta-llama/Meta-Llama-3.1-405B-Instruct", -} - class TGIAdapter(Inference): def __init__(self, config: TGIImplConfig) -> None: