feat: use SecretStr for inference provider auth credentials

- RemoteInferenceProviderConfig now has auth_credential: SecretStr
- The default serialization alias is api_key (the most common field name)
- Some providers override the alias to api_token (RunPod, vLLM, Databricks)
- Some providers exclude the field entirely (Ollama, TGI, Vertex AI)
This commit is contained in:
Matthew Farrellee 2025-10-08 05:05:05 -04:00
parent 62bac0aad4
commit 6143b9b0c3
56 changed files with 157 additions and 144 deletions

View file

@@ -6,7 +6,7 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.apis.common.errors import UnsupportedModelError
from llama_stack.apis.models import ModelType
@@ -28,6 +28,11 @@ class RemoteInferenceProviderConfig(BaseModel):
default=False,
description="Whether to refresh models periodically from the provider",
)
# Provider auth credential, stored as a pydantic SecretStr so it is masked in
# repr/serialization. Populated via the alias "api_key" (per the commit
# message, the most common provider field name); some provider subclasses
# override the alias (e.g. api_token) or exclude the field — TODO confirm
# against the individual provider configs, which are not visible here.
auth_credential: SecretStr | None = Field(
    default=None,
    description="Authentication credential for the provider",
    alias="api_key",
)
# TODO: this class is more confusing than useful right now. We need to make it