Use huggingface_hub inference client for TGI inference

This commit is contained in:
Celina Hanouti 2024-09-05 18:29:04 +02:00
parent 21bedc1596
commit e5bcfdac21
6 changed files with 179 additions and 142 deletions

View file

@@ -39,8 +39,9 @@ def available_inference_providers() -> List[ProviderSpec]:
api=Api.inference,
adapter=AdapterSpec(
adapter_id="tgi",
pip_packages=["text-generation"],
pip_packages=["huggingface_hub"],
module="llama_toolchain.inference.adapters.tgi",
config_class="llama_toolchain.inference.adapters.tgi.TGIImplConfig",
),
),
remote_provider_spec(