From cae696553cf75e20e02d6513dd301b3bb7e3ad54 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=A9bastien=20Han?=
Date: Wed, 10 Sep 2025 15:51:57 +0200
Subject: [PATCH] fix: convert to string on return
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The function signature expects a string to be returned, but
credentials.token is Any. So let's convert it when we return to make
mypy happy :)

Signed-off-by: Sébastien Han
---
 llama_stack/providers/remote/inference/vertexai/vertexai.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_stack/providers/remote/inference/vertexai/vertexai.py b/llama_stack/providers/remote/inference/vertexai/vertexai.py
index 27f953ab9..8996543e7 100644
--- a/llama_stack/providers/remote/inference/vertexai/vertexai.py
+++ b/llama_stack/providers/remote/inference/vertexai/vertexai.py
@@ -41,7 +41,7 @@ class VertexAIInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
             # Get default credentials - will read from GOOGLE_APPLICATION_CREDENTIALS
             credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
             credentials.refresh(google.auth.transport.requests.Request())
-            return credentials.token
+            return str(credentials.token)
         except Exception:
             # If we can't get credentials, return empty string to let LiteLLM handle it
             # This allows the LiteLLM mixin to work with ADC directly
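
For context, below is a minimal sketch of how the patched helper might read after this change. The method name get_api_key and the trimmed class body are assumptions for illustration; only the hunk above is authoritative.

import google.auth
import google.auth.transport.requests
from google.auth import default


class VertexAIInferenceAdapter:  # the real class also mixes in OpenAIMixin, LiteLLMOpenAIMixin
    def get_api_key(self) -> str:  # hypothetical name; the declared -> str is what motivates the fix
        try:
            # Get default credentials - will read from GOOGLE_APPLICATION_CREDENTIALS
            credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
            credentials.refresh(google.auth.transport.requests.Request())
            # credentials.token is typed as Any; wrap in str() so mypy accepts the declared return type
            return str(credentials.token)
        except Exception:
            # If we can't get credentials, return empty string to let LiteLLM handle it
            # This allows the LiteLLM mixin to work with ADC directly
            return ""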