LiteLLM support in Llama Stack

Abhishek Kumawat 2025-02-03 06:10:51 -08:00
parent 15dcc4ea5e
commit 9e0c8a82cb
6 changed files with 359 additions and 0 deletions


@@ -164,6 +164,15 @@ def available_providers() -> List[ProviderSpec]:
                 provider_data_validator="llama_stack.providers.remote.inference.groq.GroqProviderDataValidator",
             ),
         ),
+        remote_provider_spec(
+            api=Api.inference,
+            adapter=AdapterSpec(
+                adapter_type="litellm",
+                pip_packages=["litellm"],
+                module="llama_stack.providers.remote.inference.litellm",
+                config_class="llama_stack.providers.remote.inference.litellm.LitellmConfig",
+            ),
+        ),
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
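
For context, here is a minimal sketch of the kind of config and adapter glue this registry entry points at. The real implementations live in the commit's other changed files, which are not shown here; the LitellmConfig fields, the LitellmInferenceAdapter class name, and the get_adapter_impl factory below are assumptions for illustration, while litellm.acompletion is the actual litellm API.

# Illustrative sketch only; field and class names are assumptions,
# not the commit's actual code.
from typing import Any, Dict, List, Optional

import litellm
from pydantic import BaseModel


class LitellmConfig(BaseModel):
    # Assumed fields: credentials and an optional base URL that are
    # forwarded to litellm on every request.
    api_key: Optional[str] = None
    api_base: Optional[str] = None


class LitellmInferenceAdapter:
    def __init__(self, config: LitellmConfig) -> None:
        self.config = config

    async def chat_completion(self, model: str, messages: List[Dict[str, str]]) -> str:
        # litellm.acompletion is litellm's async, OpenAI-compatible entry
        # point; it routes to the backend implied by the model string
        # (e.g. "gpt-4o" or "anthropic/claude-3-5-sonnet-20241022").
        response = await litellm.acompletion(
            model=model,
            messages=messages,
            api_key=self.config.api_key,
            api_base=self.config.api_base,
        )
        return response.choices[0].message.content


async def get_adapter_impl(config: LitellmConfig, _deps: Dict[str, Any]) -> LitellmInferenceAdapter:
    # Remote providers are constructed through a factory exposed by the
    # module named in the AdapterSpec above (signature assumed here).
    return LitellmInferenceAdapter(config)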