feat: use SecretStr for inference provider auth credentials

- RemoteInferenceProviderConfig now has auth_credential: SecretStr
- the default serialization alias is api_key (the most common name)
- some providers override the alias to api_token (RunPod, vLLM, Databricks)
- some providers exclude the credential entirely (Ollama, TGI, Vertex AI)
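A minimal sketch of the pattern these bullets describe, assuming pydantic v2; apart from the auth_credential / api_key / api_token names taken from the bullets, the class and description strings are illustrative, not the upstream code:

```python
from pydantic import BaseModel, Field, SecretStr


class RemoteInferenceProviderConfig(BaseModel):
    # SecretStr masks the value in repr() and str(), so credentials do not
    # leak into logs or error messages; validated under the alias "api_key".
    auth_credential: SecretStr | None = Field(
        default=None,
        alias="api_key",  # the default, most common wire name
        description="Auth credential for the remote inference provider",
    )


# A provider whose credential is called api_token overrides only the alias.
class ApiTokenProviderConfig(RemoteInferenceProviderConfig):
    auth_credential: SecretStr | None = Field(
        default=None,
        alias="api_token",
        description="Auth credential for the remote inference provider",
    )
```

Providers in the last bullet presumably just omit the field from their user-facing config; the exact mechanism is not shown in this diff.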
Author: Matthew Farrellee
Date:   2025-10-08 05:05:05 -04:00
Commit: 6143b9b0c3 (parent 62bac0aad4)

56 changed files with 157 additions and 144 deletions

@@ -21,11 +21,6 @@ class LlamaProviderDataValidator(BaseModel):
 
 @json_schema_type
 class LlamaCompatConfig(RemoteInferenceProviderConfig):
-    api_key: str | None = Field(
-        default=None,
-        description="The Llama API key",
-    )
-
     openai_compat_api_base: str = Field(
         default="https://api.llama.com/compat/v1/",
         description="The URL for the Llama API server",

@@ -21,9 +21,6 @@ class LlamaCompatInferenceAdapter(OpenAIMixin):
     Llama API Inference Adapter for Llama Stack.
     """
 
-    def get_api_key(self) -> str:
-        return self.config.api_key or ""
-
     def get_base_url(self) -> str:
         """
         Get the base URL for OpenAI mixin.
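The removed get_api_key suggests the credential lookup now happens in shared code rather than per adapter. A hedged sketch of the unwrapping step, with unwrap_api_key as a hypothetical helper name, not the actual OpenAIMixin API:

```python
from pydantic import SecretStr


def unwrap_api_key(credential: SecretStr | None) -> str:
    """Return the raw key for client construction, or "" when absent."""
    # get_secret_value() is the only way to read a SecretStr's contents;
    # str() and repr() deliberately return the masked '**********'.
    return credential.get_secret_value() if credential is not None else ""
```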