From bc65d5c9ef0ec3a80c806bd4c155a32f67a86186 Mon Sep 17 00:00:00 2001
From: jhpiedrahitao
Date: Fri, 11 Jul 2025 15:07:22 -0500
Subject: [PATCH] update api_key_from_config initialization

---
 llama_stack/providers/remote/inference/sambanova/sambanova.py | 2 +-
 tests/integration/inference/test_openai_completion.py         | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/llama_stack/providers/remote/inference/sambanova/sambanova.py b/llama_stack/providers/remote/inference/sambanova/sambanova.py
index aade2f726..9c2dda889 100644
--- a/llama_stack/providers/remote/inference/sambanova/sambanova.py
+++ b/llama_stack/providers/remote/inference/sambanova/sambanova.py
@@ -182,7 +182,7 @@ class SambaNovaInferenceAdapter(LiteLLMOpenAIMixin):
         LiteLLMOpenAIMixin.__init__(
             self,
             model_entries=MODEL_ENTRIES,
-            api_key_from_config=self.config.api_key,
+            api_key_from_config=self.config.api_key.get_secret_value() if self.config.api_key else None,
             provider_data_api_key_field="sambanova_api_key",
         )
 
diff --git a/tests/integration/inference/test_openai_completion.py b/tests/integration/inference/test_openai_completion.py
index 05aee5096..e82714ffd 100644
--- a/tests/integration/inference/test_openai_completion.py
+++ b/tests/integration/inference/test_openai_completion.py
@@ -71,7 +71,6 @@ def skip_if_model_doesnt_support_openai_chat_completion(client_with_models, mode
         "remote::cerebras",
         "remote::databricks",
         "remote::runpod",
-        "remote::sambanova",
         "remote::tgi",
     ):
         pytest.skip(f"Model {model_id} hosted by {provider.provider_type} doesn't support OpenAI chat completions.")
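
Context for the first hunk (not part of the patch): a minimal sketch of why the unwrapping matters, assuming the provider config stores `api_key` as a pydantic `SecretStr` as is common in llama-stack provider configs. `SambaNovaConfig` below is a hypothetical, simplified stand-in for the real config class; the point is that passing the `SecretStr` object through would hand downstream code a masked wrapper rather than the actual key, so the patch calls `get_secret_value()` and falls back to `None` when no key is configured.

```python
from pydantic import BaseModel, SecretStr


# Hypothetical, simplified stand-in for the SambaNova provider config.
class SambaNovaConfig(BaseModel):
    api_key: SecretStr | None = None


config = SambaNovaConfig(api_key="sk-demo-123")

# Stringifying the SecretStr yields a masked placeholder, not the key itself.
print(str(config.api_key))                # **********
print(config.api_key.get_secret_value())  # sk-demo-123

# Pattern used in the patch: unwrap only when a key is actually configured.
api_key_from_config = config.api_key.get_secret_value() if config.api_key else None
```

The second hunk removes `remote::sambanova` from the skip list, so the SambaNova provider is now exercised by the OpenAI chat-completion integration tests instead of being skipped.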