feat: use SecretStr for inference provider auth credentials

- RemoteInferenceProviderConfig now has auth_credential: SecretStr
- the field's default alias is api_key (the most common name)
- some providers override the alias to api_token (RunPod, vLLM, Databricks)
- some providers exclude the field entirely (Ollama, TGI, Vertex AI); a sketch of the shared field follows below
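For context, a minimal sketch of what the shared field could look like; the Field options, the optionality, and the description text are assumptions rather than the commit's actual code.

from pydantic import BaseModel, Field, SecretStr


class RemoteInferenceProviderConfig(BaseModel):
    # Accepted as "api_key" by default; providers such as RunPod, vLLM, or
    # Databricks would override the alias to "api_token", and providers like
    # Ollama, TGI, or Vertex AI would drop the field entirely.
    auth_credential: SecretStr | None = Field(
        default=None,
        alias="api_key",
        description="Auth credential for the remote inference provider",
    )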
Author: Matthew Farrellee
Date: 2025-10-08 05:05:05 -04:00
Parent: 62bac0aad4
Commit: 6143b9b0c3
56 changed files with 157 additions and 144 deletions


@@ -21,11 +21,6 @@ class GeminiProviderDataValidator(BaseModel):
 @json_schema_type
 class GeminiConfig(RemoteInferenceProviderConfig):
-    api_key: str | None = Field(
-        default=None,
-        description="API key for Gemini models",
-    )
-
     @classmethod
     def sample_run_config(cls, api_key: str = "${env.GEMINI_API_KEY:=}", **kwargs) -> dict[str, Any]:
         return {
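With the provider-level api_key field removed, Gemini's key rides on the inherited auth_credential; a hypothetical usage sketch (the instantiation below is illustrative, not taken from the diff):

# Hypothetical: the alias keeps existing run configs passing "api_key"
# unchanged, while the value is stored as a SecretStr and masked on print.
config = GeminiConfig(api_key="example-key")
print(config.auth_credential)                     # **********
print(config.auth_credential.get_secret_value())  # example-key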


@@ -17,8 +17,5 @@ class GeminiInferenceAdapter(OpenAIMixin):
         "text-embedding-004": {"embedding_dimension": 768, "context_length": 2048},
     }
 
-    def get_api_key(self) -> str:
-        return self.config.api_key or ""
-
     def get_base_url(self):
         return "https://generativelanguage.googleapis.com/v1beta/openai/"