From 7fe3dac26565727463915456bc74dada4f3f9463 Mon Sep 17 00:00:00 2001
From: Matthew Farrellee
Date: Mon, 6 Oct 2025 10:42:33 -0400
Subject: [PATCH] remove unnecessary litellm deps

---
 llama_stack/providers/registry/inference.py | 21 +++++++--------------
 1 file changed, 7 insertions(+), 14 deletions(-)

diff --git a/llama_stack/providers/registry/inference.py b/llama_stack/providers/registry/inference.py
index f51b65cc2..bf6a09b6c 100644
--- a/llama_stack/providers/registry/inference.py
+++ b/llama_stack/providers/registry/inference.py
@@ -167,7 +167,7 @@ def available_providers() -> list[ProviderSpec]:
             api=Api.inference,
             adapter_type="openai",
             provider_type="remote::openai",
-            pip_packages=["litellm"],
+            pip_packages=[],
             module="llama_stack.providers.remote.inference.openai",
             config_class="llama_stack.providers.remote.inference.openai.OpenAIConfig",
             provider_data_validator="llama_stack.providers.remote.inference.openai.config.OpenAIProviderDataValidator",
@@ -177,7 +177,7 @@ def available_providers() -> list[ProviderSpec]:
             api=Api.inference,
             adapter_type="anthropic",
             provider_type="remote::anthropic",
-            pip_packages=["litellm", "anthropic"],
+            pip_packages=["anthropic"],
             module="llama_stack.providers.remote.inference.anthropic",
             config_class="llama_stack.providers.remote.inference.anthropic.AnthropicConfig",
             provider_data_validator="llama_stack.providers.remote.inference.anthropic.config.AnthropicProviderDataValidator",
@@ -187,9 +187,7 @@ def available_providers() -> list[ProviderSpec]:
             api=Api.inference,
             adapter_type="gemini",
             provider_type="remote::gemini",
-            pip_packages=[
-                "litellm",
-            ],
+            pip_packages=[],
             module="llama_stack.providers.remote.inference.gemini",
             config_class="llama_stack.providers.remote.inference.gemini.GeminiConfig",
             provider_data_validator="llama_stack.providers.remote.inference.gemini.config.GeminiProviderDataValidator",
@@ -200,7 +198,6 @@ def available_providers() -> list[ProviderSpec]:
             adapter_type="vertexai",
             provider_type="remote::vertexai",
             pip_packages=[
-                "litellm",
                 "google-cloud-aiplatform",
             ],
             module="llama_stack.providers.remote.inference.vertexai",
@@ -231,9 +228,7 @@ Available Models:
             api=Api.inference,
             adapter_type="groq",
             provider_type="remote::groq",
-            pip_packages=[
-                "litellm",
-            ],
+            pip_packages=[],
             module="llama_stack.providers.remote.inference.groq",
             config_class="llama_stack.providers.remote.inference.groq.GroqConfig",
             provider_data_validator="llama_stack.providers.remote.inference.groq.config.GroqProviderDataValidator",
@@ -243,7 +238,7 @@ Available Models:
             api=Api.inference,
             adapter_type="llama-openai-compat",
             provider_type="remote::llama-openai-compat",
-            pip_packages=["litellm"],
+            pip_packages=[],
             module="llama_stack.providers.remote.inference.llama_openai_compat",
             config_class="llama_stack.providers.remote.inference.llama_openai_compat.config.LlamaCompatConfig",
             provider_data_validator="llama_stack.providers.remote.inference.llama_openai_compat.config.LlamaProviderDataValidator",
@@ -253,9 +248,7 @@ Available Models:
             api=Api.inference,
             adapter_type="sambanova",
             provider_type="remote::sambanova",
-            pip_packages=[
-                "litellm",
-            ],
+            pip_packages=[],
             module="llama_stack.providers.remote.inference.sambanova",
             config_class="llama_stack.providers.remote.inference.sambanova.SambaNovaImplConfig",
             provider_data_validator="llama_stack.providers.remote.inference.sambanova.config.SambaNovaProviderDataValidator",
@@ -285,7 +278,7 @@ Available Models:
             api=Api.inference,
             provider_type="remote::azure",
             adapter_type="azure",
-            pip_packages=["litellm"],
+            pip_packages=[],
             module="llama_stack.providers.remote.inference.azure",
             config_class="llama_stack.providers.remote.inference.azure.AzureConfig",
             provider_data_validator="llama_stack.providers.remote.inference.azure.config.AzureProviderDataValidator",