Allow TGI adaptor to have non-standard llama model names (#84)

Co-authored-by: Hardik Shah <hjshah@fb.com>
This commit is contained in:
Hardik Shah 2024-09-19 21:42:15 -07:00 committed by GitHub
parent 42d29f3a5a
commit 8fa49593e0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -18,12 +18,6 @@ from llama_stack.providers.utils.inference.prepare_messages import prepare_messages
from .config import TGIImplConfig
HF_SUPPORTED_MODELS = {
"Meta-Llama3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"Meta-Llama3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct",
"Meta-Llama3.1-405B-Instruct": "meta-llama/Meta-Llama-3.1-405B-Instruct",
}
class TGIAdapter(Inference):
def __init__(self, config: TGIImplConfig) -> None: