From 3c40c8e583cf6a9d24cdbaa40f5348948864b44e Mon Sep 17 00:00:00 2001
From: Matthew Farrellee
Date: Mon, 28 Jul 2025 13:02:16 -0400
Subject: [PATCH] fix: litellm_provider_name for llama-api (#2934)

litellm uses "meta_llama" for the provider name, see
https://docs.litellm.ai/docs/providers/meta_llama and
https://github.com/BerriAI/litellm/blob/main/litellm/__init__.py#L833
---
 .../providers/remote/inference/llama_openai_compat/llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_stack/providers/remote/inference/llama_openai_compat/llama.py b/llama_stack/providers/remote/inference/llama_openai_compat/llama.py
index 707aacc7f..4857c6723 100644
--- a/llama_stack/providers/remote/inference/llama_openai_compat/llama.py
+++ b/llama_stack/providers/remote/inference/llama_openai_compat/llama.py
@@ -32,7 +32,7 @@ class LlamaCompatInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
         LiteLLMOpenAIMixin.__init__(
             self,
             model_entries=MODEL_ENTRIES,
-            litellm_provider_name="llama",
+            litellm_provider_name="meta_llama",
             api_key_from_config=config.api_key,
             provider_data_api_key_field="llama_api_key",
             openai_compat_api_base=config.openai_compat_api_base,
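
For context, below is a minimal sketch of how litellm consumes the "meta_llama" provider prefix, which is why the adapter must register under that name rather than "llama". The model id and environment variable shown are illustrative assumptions based on litellm's Llama API docs, not part of this patch.

import os
import litellm

# Assumed env var name for the Llama API key, per litellm's provider docs.
os.environ["LLAMA_API_KEY"] = "..."

# litellm routes model ids prefixed with "meta_llama/" through its Llama API
# provider; a "llama/" prefix would not resolve. Model id is an example.
response = litellm.completion(
    model="meta_llama/Llama-3.3-70B-Instruct-FP8",
    messages=[{"role": "user", "content": "hello"}],
)
print(response.choices[0].message.content)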