feat: use SecretStr for inference provider auth credentials (#3724)

# What does this PR do?

Use `SecretStr` for OpenAIMixin providers.

- RemoteInferenceProviderConfig now has auth_credential: SecretStr
- the default alias is api_key (most common name)
- some providers override to use api_token (RunPod, vLLM, Databricks)
- some providers exclude it (Ollama, TGI, Vertex AI)

Addresses #3517.

## Test Plan

CI with new tests.
This commit is contained in:
Matthew Farrellee 2025-10-10 10:32:50 -04:00 committed by GitHub
parent 6d8f61206e
commit 0066d986c5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
57 changed files with 158 additions and 149 deletions

View file

@ -76,6 +76,8 @@ def get_config_class_info(config_class_path: str) -> dict[str, Any]:
fields_info = {}
if hasattr(config_class, "model_fields"):
for field_name, field in config_class.model_fields.items():
if getattr(field, "exclude", False):
continue
field_type = str(field.annotation) if field.annotation else "Any"
# this string replace is ridiculous
@ -106,7 +108,10 @@ def get_config_class_info(config_class_path: str) -> dict[str, Any]:
"default": default_value,
"required": field.default is None and not field.is_required,
}
fields_info[field_name] = field_info
# Use alias if available, otherwise use the field name
display_name = field.alias if field.alias else field_name
fields_info[display_name] = field_info
if accepts_extra_config:
config_description = "Additional configuration options that will be forwarded to the underlying provider"