diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
index 792162262..000208043 100644
--- a/.github/workflows/pre-commit.yml
+++ b/.github/workflows/pre-commit.yml
@@ -48,7 +48,6 @@ jobs:
         working-directory: llama_stack/ui
       - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
-        continue-on-error: true
         env:
           SKIP: no-commit-to-branch
           RUFF_OUTPUT_FORMAT: github
diff --git a/llama_stack/providers/remote/inference/vertexai/vertexai.py b/llama_stack/providers/remote/inference/vertexai/vertexai.py
index 27f953ab9..8996543e7 100644
--- a/llama_stack/providers/remote/inference/vertexai/vertexai.py
+++ b/llama_stack/providers/remote/inference/vertexai/vertexai.py
@@ -41,7 +41,7 @@ class VertexAIInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
             # Get default credentials - will read from GOOGLE_APPLICATION_CREDENTIALS
             credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
             credentials.refresh(google.auth.transport.requests.Request())
-            return credentials.token
+            return str(credentials.token)
         except Exception:
             # If we can't get credentials, return empty string to let LiteLLM handle it
             # This allows the LiteLLM mixin to work with ADC directly
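
For context, here is a minimal standalone sketch of the helper touched by the second hunk, assuming a method along the lines of `get_api_key` on the adapter (the method name and the trailing `return ""` fallback are inferred from the hunk's comments, not shown in the diff). It illustrates why the `str(...)` coercion matters: `google-auth` may annotate `credentials.token` as optional, so wrapping it keeps the declared `str` return type honest.

```python
# Hypothetical standalone sketch; in the real code this is a method on
# VertexAIInferenceAdapter, not a free function.
import google.auth.transport.requests
from google.auth import default


def get_api_key() -> str:
    try:
        # Get default credentials - reads GOOGLE_APPLICATION_CREDENTIALS (ADC)
        credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
        credentials.refresh(google.auth.transport.requests.Request())
        # credentials.token may be typed as optional in google-auth, so coerce
        # to str to keep the return type a plain str for callers/type checkers.
        return str(credentials.token)
    except Exception:
        # If we can't get credentials, return an empty string so the LiteLLM
        # mixin can fall back to Application Default Credentials on its own.
        return ""
```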