chore: use empty SecretStr values as default

Better than using SecretStr | None so we centralize the null handling.

Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
Sébastien Han 2025-09-22 14:21:10 +02:00
parent c4cb6aa8d9
commit 4af141292f
No known key found for this signature in database
51 changed files with 103 additions and 93 deletions

View file

@ -17,7 +17,7 @@ AWS S3-based file storage provider for scalable cloud file management with metad
| `bucket_name` | `<class 'str'>` | No | | S3 bucket name to store files | | `bucket_name` | `<class 'str'>` | No | | S3 bucket name to store files |
| `region` | `<class 'str'>` | No | us-east-1 | AWS region where the bucket is located | | `region` | `<class 'str'>` | No | us-east-1 | AWS region where the bucket is located |
| `aws_access_key_id` | `str \| None` | No | | AWS access key ID (optional if using IAM roles) | | `aws_access_key_id` | `str \| None` | No | | AWS access key ID (optional if using IAM roles) |
| `aws_secret_access_key` | `pydantic.types.SecretStr \| None` | No | | AWS secret access key (optional if using IAM roles) | | `aws_secret_access_key` | `<class 'pydantic.types.SecretStr'>` | No | | AWS secret access key (optional if using IAM roles) |
| `endpoint_url` | `str \| None` | No | | Custom S3 endpoint URL (for MinIO, LocalStack, etc.) | | `endpoint_url` | `str \| None` | No | | Custom S3 endpoint URL (for MinIO, LocalStack, etc.) |
| `auto_create_bucket` | `<class 'bool'>` | No | False | Automatically create the S3 bucket if it doesn't exist | | `auto_create_bucket` | `<class 'bool'>` | No | False | Automatically create the S3 bucket if it doesn't exist |
| `metadata_store` | `utils.sqlstore.sqlstore.SqliteSqlStoreConfig \| utils.sqlstore.sqlstore.PostgresSqlStoreConfig` | No | sqlite | SQL store configuration for file metadata | | `metadata_store` | `utils.sqlstore.sqlstore.SqliteSqlStoreConfig \| utils.sqlstore.sqlstore.PostgresSqlStoreConfig` | No | sqlite | SQL store configuration for file metadata |

View file

@ -14,7 +14,7 @@ Anthropic inference provider for accessing Claude models and Anthropic's AI serv
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | API key for Anthropic models | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | API key for Anthropic models |
## Sample Configuration ## Sample Configuration

View file

@ -15,8 +15,8 @@ AWS Bedrock inference provider for accessing various AI models through AWS's man
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `aws_access_key_id` | `str \| None` | No | | The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID | | `aws_access_key_id` | `str \| None` | No | | The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID |
| `aws_secret_access_key` | `pydantic.types.SecretStr \| None` | No | | The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY | | `aws_secret_access_key` | `<class 'pydantic.types.SecretStr'>` | No | | The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY |
| `aws_session_token` | `pydantic.types.SecretStr \| None` | No | | The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN | | `aws_session_token` | `<class 'pydantic.types.SecretStr'>` | No | | The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN |
| `region_name` | `str \| None` | No | | The default AWS Region to use, for example, us-west-1 or us-west-2. Default use environment variable: AWS_DEFAULT_REGION | | `region_name` | `str \| None` | No | | The default AWS Region to use, for example, us-west-1 or us-west-2. Default use environment variable: AWS_DEFAULT_REGION |
| `profile_name` | `str \| None` | No | | The profile name that contains credentials to use. Default use environment variable: AWS_PROFILE | | `profile_name` | `str \| None` | No | | The profile name that contains credentials to use. Default use environment variable: AWS_PROFILE |
| `total_max_attempts` | `int \| None` | No | | An integer representing the maximum number of attempts that will be made for a single request, including the initial attempt. Default use environment variable: AWS_MAX_ATTEMPTS | | `total_max_attempts` | `int \| None` | No | | An integer representing the maximum number of attempts that will be made for a single request, including the initial attempt. Default use environment variable: AWS_MAX_ATTEMPTS |

View file

@ -16,7 +16,7 @@ Fireworks AI inference provider for Llama models and other AI models on the Fire
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `allowed_models` | `list[str] \| None` | No | | List of models that should be registered with the model registry. If None, all models are allowed. | | `allowed_models` | `list[str] \| None` | No | | List of models that should be registered with the model registry. If None, all models are allowed. |
| `url` | `<class 'str'>` | No | https://api.fireworks.ai/inference/v1 | The URL for the Fireworks server | | `url` | `<class 'str'>` | No | https://api.fireworks.ai/inference/v1 | The URL for the Fireworks server |
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The Fireworks.ai API Key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The Fireworks.ai API Key |
## Sample Configuration ## Sample Configuration

View file

@ -14,7 +14,7 @@ Google Gemini inference provider for accessing Gemini models and Google's AI ser
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | API key for Gemini models | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | API key for Gemini models |
## Sample Configuration ## Sample Configuration

View file

@ -14,7 +14,7 @@ Groq inference provider for ultra-fast inference using Groq's LPU technology.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The Groq API key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The Groq API key |
| `url` | `<class 'str'>` | No | https://api.groq.com | The URL for the Groq AI server | | `url` | `<class 'str'>` | No | https://api.groq.com | The URL for the Groq AI server |
## Sample Configuration ## Sample Configuration

View file

@ -14,7 +14,7 @@ Llama OpenAI-compatible provider for using Llama models with OpenAI API format.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The Llama API key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The Llama API key |
| `openai_compat_api_base` | `<class 'str'>` | No | https://api.llama.com/compat/v1/ | The URL for the Llama API server | | `openai_compat_api_base` | `<class 'str'>` | No | https://api.llama.com/compat/v1/ | The URL for the Llama API server |
## Sample Configuration ## Sample Configuration

View file

@ -15,7 +15,7 @@ NVIDIA inference provider for accessing NVIDIA NIM models and AI services.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `url` | `<class 'str'>` | No | https://integrate.api.nvidia.com | A base url for accessing the NVIDIA NIM | | `url` | `<class 'str'>` | No | https://integrate.api.nvidia.com | A base url for accessing the NVIDIA NIM |
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The NVIDIA API key, only needed if using the hosted service | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The NVIDIA API key, only needed if using the hosted service |
| `timeout` | `<class 'int'>` | No | 60 | Timeout for the HTTP requests | | `timeout` | `<class 'int'>` | No | 60 | Timeout for the HTTP requests |
| `append_api_version` | `<class 'bool'>` | No | True | When set to false, the API version will not be appended to the base_url. By default, it is true. | | `append_api_version` | `<class 'bool'>` | No | True | When set to false, the API version will not be appended to the base_url. By default, it is true. |

View file

@ -14,7 +14,7 @@ OpenAI inference provider for accessing GPT models and other OpenAI services.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | API key for OpenAI models | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | API key for OpenAI models |
| `base_url` | `<class 'str'>` | No | https://api.openai.com/v1 | Base URL for OpenAI API | | `base_url` | `<class 'str'>` | No | https://api.openai.com/v1 | Base URL for OpenAI API |
## Sample Configuration ## Sample Configuration

View file

@ -15,7 +15,7 @@ Passthrough inference provider for connecting to any external inference service
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `url` | `<class 'str'>` | No | | The URL for the passthrough endpoint | | `url` | `<class 'str'>` | No | | The URL for the passthrough endpoint |
| `api_key` | `pydantic.types.SecretStr \| None` | No | | API Key for the passthrough endpoint | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | API Key for the passthrough endpoint |
## Sample Configuration ## Sample Configuration

View file

@ -15,7 +15,7 @@ RunPod inference provider for running models on RunPod's cloud GPU platform.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `url` | `str \| None` | No | | The URL for the Runpod model serving endpoint | | `url` | `str \| None` | No | | The URL for the Runpod model serving endpoint |
| `api_token` | `pydantic.types.SecretStr \| None` | No | | The API token | | `api_token` | `<class 'pydantic.types.SecretStr'>` | No | | The API token |
## Sample Configuration ## Sample Configuration

View file

@ -15,7 +15,7 @@ SambaNova inference provider for running models on SambaNova's dataflow architec
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `url` | `<class 'str'>` | No | https://api.sambanova.ai/v1 | The URL for the SambaNova AI server | | `url` | `<class 'str'>` | No | https://api.sambanova.ai/v1 | The URL for the SambaNova AI server |
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The SambaNova cloud API Key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The SambaNova cloud API Key |
## Sample Configuration ## Sample Configuration

View file

@ -16,7 +16,7 @@ Together AI inference provider for open-source models and collaborative AI devel
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `allowed_models` | `list[str] \| None` | No | | List of models that should be registered with the model registry. If None, all models are allowed. | | `allowed_models` | `list[str] \| None` | No | | List of models that should be registered with the model registry. If None, all models are allowed. |
| `url` | `<class 'str'>` | No | https://api.together.xyz/v1 | The URL for the Together AI server | | `url` | `<class 'str'>` | No | https://api.together.xyz/v1 | The URL for the Together AI server |
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The Together AI API Key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The Together AI API Key |
## Sample Configuration ## Sample Configuration

View file

@ -16,7 +16,7 @@ Remote vLLM inference provider for connecting to vLLM servers.
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `url` | `str \| None` | No | | The URL for the vLLM model serving endpoint | | `url` | `str \| None` | No | | The URL for the vLLM model serving endpoint |
| `max_tokens` | `<class 'int'>` | No | 4096 | Maximum number of tokens to generate. | | `max_tokens` | `<class 'int'>` | No | 4096 | Maximum number of tokens to generate. |
| `api_token` | `pydantic.types.SecretStr \| None` | No | ********** | The API token | | `api_token` | `<class 'pydantic.types.SecretStr'>` | No | | The API token |
| `tls_verify` | `bool \| str` | No | True | Whether to verify TLS certificates. Can be a boolean or a path to a CA certificate file. | | `tls_verify` | `bool \| str` | No | True | Whether to verify TLS certificates. Can be a boolean or a path to a CA certificate file. |
| `refresh_models` | `<class 'bool'>` | No | False | Whether to refresh models periodically | | `refresh_models` | `<class 'bool'>` | No | False | Whether to refresh models periodically |

View file

@ -15,7 +15,7 @@ IBM WatsonX inference provider for accessing AI models on IBM's WatsonX platform
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `url` | `<class 'str'>` | No | https://us-south.ml.cloud.ibm.com | A base url for accessing the watsonx.ai | | `url` | `<class 'str'>` | No | https://us-south.ml.cloud.ibm.com | A base url for accessing the watsonx.ai |
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The watsonx API key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The watsonx API key |
| `project_id` | `str \| None` | No | | The Project ID key | | `project_id` | `str \| None` | No | | The Project ID key |
| `timeout` | `<class 'int'>` | No | 60 | Timeout for the HTTP requests | | `timeout` | `<class 'int'>` | No | 60 | Timeout for the HTTP requests |

View file

@ -15,8 +15,8 @@ AWS Bedrock safety provider for content moderation using AWS's safety services.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `aws_access_key_id` | `str \| None` | No | | The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID | | `aws_access_key_id` | `str \| None` | No | | The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID |
| `aws_secret_access_key` | `pydantic.types.SecretStr \| None` | No | | The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY | | `aws_secret_access_key` | `<class 'pydantic.types.SecretStr'>` | No | | The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY |
| `aws_session_token` | `pydantic.types.SecretStr \| None` | No | | The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN | | `aws_session_token` | `<class 'pydantic.types.SecretStr'>` | No | | The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN |
| `region_name` | `str \| None` | No | | The default AWS Region to use, for example, us-west-1 or us-west-2. Default use environment variable: AWS_DEFAULT_REGION | | `region_name` | `str \| None` | No | | The default AWS Region to use, for example, us-west-1 or us-west-2. Default use environment variable: AWS_DEFAULT_REGION |
| `profile_name` | `str \| None` | No | | The profile name that contains credentials to use. Default use environment variable: AWS_PROFILE | | `profile_name` | `str \| None` | No | | The profile name that contains credentials to use. Default use environment variable: AWS_PROFILE |
| `total_max_attempts` | `int \| None` | No | | An integer representing the maximum number of attempts that will be made for a single request, including the initial attempt. Default use environment variable: AWS_MAX_ATTEMPTS | | `total_max_attempts` | `int \| None` | No | | An integer representing the maximum number of attempts that will be made for a single request, including the initial attempt. Default use environment variable: AWS_MAX_ATTEMPTS |

View file

@ -15,7 +15,7 @@ SambaNova's safety provider for content moderation and safety filtering.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `url` | `<class 'str'>` | No | https://api.sambanova.ai/v1 | The URL for the SambaNova AI server | | `url` | `<class 'str'>` | No | https://api.sambanova.ai/v1 | The URL for the SambaNova AI server |
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The SambaNova cloud API Key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The SambaNova cloud API Key |
## Sample Configuration ## Sample Configuration

View file

@ -14,7 +14,7 @@ Braintrust scoring provider for evaluation and scoring using the Braintrust plat
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `openai_api_key` | `pydantic.types.SecretStr \| None` | No | | The OpenAI API Key | | `openai_api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The OpenAI API Key |
## Sample Configuration ## Sample Configuration

View file

@ -14,7 +14,7 @@ Bing Search tool for web search capabilities using Microsoft's search engine.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The Bing API key |
| `top_k` | `<class 'int'>` | No | 3 | | | `top_k` | `<class 'int'>` | No | 3 | |
## Sample Configuration ## Sample Configuration

View file

@ -14,7 +14,7 @@ Brave Search tool for web search capabilities with privacy-focused results.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The Brave Search API Key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The Brave Search API Key |
| `max_results` | `<class 'int'>` | No | 3 | The maximum number of results to return | | `max_results` | `<class 'int'>` | No | 3 | The maximum number of results to return |
## Sample Configuration ## Sample Configuration

View file

@ -14,7 +14,7 @@ Tavily Search tool for AI-optimized web search with structured results.
| Field | Type | Required | Default | Description | | Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------| |-------|------|----------|---------|-------------|
| `api_key` | `pydantic.types.SecretStr \| None` | No | | The Tavily Search API Key | | `api_key` | `<class 'pydantic.types.SecretStr'>` | No | | The Tavily Search API Key |
| `max_results` | `<class 'int'>` | No | 3 | The maximum number of results to return | | `max_results` | `<class 'int'>` | No | 3 | The maximum number of results to return |
## Sample Configuration ## Sample Configuration

View file

@ -217,7 +217,7 @@ See [PGVector's documentation](https://github.com/pgvector/pgvector) for more de
| `port` | `int \| None` | No | 5432 | | | `port` | `int \| None` | No | 5432 | |
| `db` | `str \| None` | No | postgres | | | `db` | `str \| None` | No | postgres | |
| `user` | `str \| None` | No | postgres | | | `user` | `str \| None` | No | postgres | |
| `password` | `pydantic.types.SecretStr \| None` | No | mysecretpassword | | | `password` | `<class 'pydantic.types.SecretStr'>` | No | ********** | |
| `kvstore` | `utils.kvstore.config.RedisKVStoreConfig \| utils.kvstore.config.SqliteKVStoreConfig \| utils.kvstore.config.PostgresKVStoreConfig \| utils.kvstore.config.MongoDBKVStoreConfig, annotation=NoneType, required=False, default='sqlite', discriminator='type'` | No | | Config for KV store backend (SQLite only for now) | | `kvstore` | `utils.kvstore.config.RedisKVStoreConfig \| utils.kvstore.config.SqliteKVStoreConfig \| utils.kvstore.config.PostgresKVStoreConfig \| utils.kvstore.config.MongoDBKVStoreConfig, annotation=NoneType, required=False, default='sqlite', discriminator='type'` | No | | Config for KV store backend (SQLite only for now) |
## Sample Configuration ## Sample Configuration

View file

@ -9,8 +9,8 @@ from pydantic import BaseModel, Field, SecretStr
class BraintrustScoringConfig(BaseModel): class BraintrustScoringConfig(BaseModel):
openai_api_key: SecretStr | None = Field( openai_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The OpenAI API Key", description="The OpenAI API Key",
) )

View file

@ -17,8 +17,8 @@ class S3FilesImplConfig(BaseModel):
bucket_name: str = Field(description="S3 bucket name to store files") bucket_name: str = Field(description="S3 bucket name to store files")
region: str = Field(default="us-east-1", description="AWS region where the bucket is located") region: str = Field(default="us-east-1", description="AWS region where the bucket is located")
aws_access_key_id: str | None = Field(default=None, description="AWS access key ID (optional if using IAM roles)") aws_access_key_id: str | None = Field(default=None, description="AWS access key ID (optional if using IAM roles)")
aws_secret_access_key: SecretStr | None = Field( aws_secret_access_key: SecretStr = Field(
default=None, description="AWS secret access key (optional if using IAM roles)" default=SecretStr(""), description="AWS secret access key (optional if using IAM roles)"
) )
endpoint_url: str | None = Field(default=None, description="Custom S3 endpoint URL (for MinIO, LocalStack, etc.)") endpoint_url: str | None = Field(default=None, description="Custom S3 endpoint URL (for MinIO, LocalStack, etc.)")
auto_create_bucket: bool = Field( auto_create_bucket: bool = Field(

View file

@ -12,16 +12,16 @@ from llama_stack.schema_utils import json_schema_type
class AnthropicProviderDataValidator(BaseModel): class AnthropicProviderDataValidator(BaseModel):
anthropic_api_key: SecretStr | None = Field( anthropic_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for Anthropic models", description="API key for Anthropic models",
) )
@json_schema_type @json_schema_type
class AnthropicConfig(BaseModel): class AnthropicConfig(BaseModel):
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for Anthropic models", description="API key for Anthropic models",
) )

View file

@ -21,7 +21,7 @@ class AzureInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
LiteLLMOpenAIMixin.__init__( LiteLLMOpenAIMixin.__init__(
self, self,
litellm_provider_name="azure", litellm_provider_name="azure",
api_key_from_config=config.api_key.get_secret_value(), api_key_from_config=config.api_key,
provider_data_api_key_field="azure_api_key", provider_data_api_key_field="azure_api_key",
openai_compat_api_base=str(config.api_base), openai_compat_api_base=str(config.api_base),
) )

View file

@ -21,7 +21,11 @@ class CerebrasImplConfig(BaseModel):
description="Base URL for the Cerebras API", description="Base URL for the Cerebras API",
) )
api_key: SecretStr = Field( api_key: SecretStr = Field(
default=SecretStr(os.environ.get("CEREBRAS_API_KEY")), default=SecretStr(os.environ.get("CEREBRAS_API_KEY", "")),
description="Cerebras API Key", description="Cerebras API Key",
) )

View file

@ -18,8 +18,8 @@ class FireworksImplConfig(RemoteInferenceProviderConfig):
default="https://api.fireworks.ai/inference/v1", default="https://api.fireworks.ai/inference/v1",
description="The URL for the Fireworks server", description="The URL for the Fireworks server",
) )
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The Fireworks.ai API Key", description="The Fireworks.ai API Key",
) )

View file

@ -12,16 +12,16 @@ from llama_stack.schema_utils import json_schema_type
class GeminiProviderDataValidator(BaseModel): class GeminiProviderDataValidator(BaseModel):
gemini_api_key: SecretStr | None = Field( gemini_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for Gemini models", description="API key for Gemini models",
) )
@json_schema_type @json_schema_type
class GeminiConfig(BaseModel): class GeminiConfig(BaseModel):
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for Gemini models", description="API key for Gemini models",
) )

View file

@ -12,17 +12,17 @@ from llama_stack.schema_utils import json_schema_type
class GroqProviderDataValidator(BaseModel): class GroqProviderDataValidator(BaseModel):
groq_api_key: SecretStr | None = Field( groq_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for Groq models", description="API key for Groq models",
) )
@json_schema_type @json_schema_type
class GroqConfig(BaseModel): class GroqConfig(BaseModel):
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
# The Groq client library loads the GROQ_API_KEY environment variable by default # The Groq client library loads the GROQ_API_KEY environment variable by default
default=None, default=SecretStr(""),
description="The Groq API key", description="The Groq API key",
) )

View file

@ -12,16 +12,16 @@ from llama_stack.schema_utils import json_schema_type
class LlamaProviderDataValidator(BaseModel): class LlamaProviderDataValidator(BaseModel):
llama_api_key: SecretStr | None = Field( llama_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for api.llama models", description="API key for api.llama models",
) )
@json_schema_type @json_schema_type
class LlamaCompatConfig(BaseModel): class LlamaCompatConfig(BaseModel):
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The Llama API key", description="The Llama API key",
) )

View file

@ -39,8 +39,8 @@ class NVIDIAConfig(BaseModel):
default_factory=lambda: os.getenv("NVIDIA_BASE_URL", "https://integrate.api.nvidia.com"), default_factory=lambda: os.getenv("NVIDIA_BASE_URL", "https://integrate.api.nvidia.com"),
description="A base url for accessing the NVIDIA NIM", description="A base url for accessing the NVIDIA NIM",
) )
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default_factory=lambda: SecretStr(os.getenv("NVIDIA_API_KEY")), default_factory=lambda: SecretStr(os.getenv("NVIDIA_API_KEY", "")),
description="The NVIDIA API key, only needed of using the hosted service", description="The NVIDIA API key, only needed of using the hosted service",
) )
timeout: int = Field( timeout: int = Field(

View file

@ -12,16 +12,16 @@ from llama_stack.schema_utils import json_schema_type
class OpenAIProviderDataValidator(BaseModel): class OpenAIProviderDataValidator(BaseModel):
openai_api_key: SecretStr | None = Field( openai_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for OpenAI models", description="API key for OpenAI models",
) )
@json_schema_type @json_schema_type
class OpenAIConfig(BaseModel): class OpenAIConfig(BaseModel):
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API key for OpenAI models", description="API key for OpenAI models",
) )
base_url: str = Field( base_url: str = Field(

View file

@ -18,8 +18,8 @@ class PassthroughImplConfig(BaseModel):
description="The URL for the passthrough endpoint", description="The URL for the passthrough endpoint",
) )
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="API Key for the passthrouth endpoint", description="API Key for the passthrouth endpoint",
) )

View file

@ -17,8 +17,8 @@ class RunpodImplConfig(BaseModel):
default=None, default=None,
description="The URL for the Runpod model serving endpoint", description="The URL for the Runpod model serving endpoint",
) )
api_token: SecretStr | None = Field( api_token: SecretStr = Field(
default=None, default=SecretStr(""),
description="The API token", description="The API token",
) )

View file

@ -12,8 +12,8 @@ from llama_stack.schema_utils import json_schema_type
class SambaNovaProviderDataValidator(BaseModel): class SambaNovaProviderDataValidator(BaseModel):
sambanova_api_key: str | None = Field( sambanova_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="Sambanova Cloud API key", description="Sambanova Cloud API key",
) )
@ -24,8 +24,8 @@ class SambaNovaImplConfig(BaseModel):
default="https://api.sambanova.ai/v1", default="https://api.sambanova.ai/v1",
description="The URL for the SambaNova AI server", description="The URL for the SambaNova AI server",
) )
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The SambaNova cloud API Key", description="The SambaNova cloud API Key",
) )

View file

@ -18,8 +18,8 @@ class TogetherImplConfig(RemoteInferenceProviderConfig):
default="https://api.together.xyz/v1", default="https://api.together.xyz/v1",
description="The URL for the Together AI server", description="The URL for the Together AI server",
) )
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The Together AI API Key", description="The Together AI API Key",
) )

View file

@ -24,12 +24,12 @@ class VertexAIInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
LiteLLMOpenAIMixin.__init__( LiteLLMOpenAIMixin.__init__(
self, self,
litellm_provider_name="vertex_ai", litellm_provider_name="vertex_ai",
api_key_from_config=None, # Vertex AI uses ADC, not API keys api_key_from_config=SecretStr(""), # Vertex AI uses ADC, not API keys
provider_data_api_key_field="vertex_project", # Use project for validation provider_data_api_key_field="vertex_project", # Use project for validation
) )
self.config = config self.config = config
def get_api_key(self) -> str: def get_api_key(self) -> SecretStr:
""" """
Get an access token for Vertex AI using Application Default Credentials. Get an access token for Vertex AI using Application Default Credentials.
@ -40,7 +40,7 @@ class VertexAIInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
# Get default credentials - will read from GOOGLE_APPLICATION_CREDENTIALS # Get default credentials - will read from GOOGLE_APPLICATION_CREDENTIALS
credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"]) credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
credentials.refresh(google.auth.transport.requests.Request()) credentials.refresh(google.auth.transport.requests.Request())
return str(credentials.token) return SecretStr(credentials.token)
except Exception: except Exception:
# If we can't get credentials, return empty string to let LiteLLM handle it # If we can't get credentials, return empty string to let LiteLLM handle it
# This allows the LiteLLM mixin to work with ADC directly # This allows the LiteLLM mixin to work with ADC directly

View file

@ -4,13 +4,16 @@
# This source code is licensed under the terms described in the LICENSE file in # This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree. # the root directory of this source tree.
from pydantic import BaseModel from pydantic import BaseModel, Field, SecretStr
from .config import VLLMInferenceAdapterConfig from .config import VLLMInferenceAdapterConfig
class VLLMProviderDataValidator(BaseModel): class VLLMProviderDataValidator(BaseModel):
vllm_api_token: str | None = None vllm_api_token: SecretStr = Field(
default=SecretStr(""),
description="API token for vLLM models",
)
async def get_adapter_impl(config: VLLMInferenceAdapterConfig, _deps): async def get_adapter_impl(config: VLLMInferenceAdapterConfig, _deps):

View file

@ -21,8 +21,8 @@ class VLLMInferenceAdapterConfig(BaseModel):
default=4096, default=4096,
description="Maximum number of tokens to generate.", description="Maximum number of tokens to generate.",
) )
api_token: SecretStr | None = Field( api_token: SecretStr = Field(
default=SecretStr("fake"), default=SecretStr(""),
description="The API token", description="The API token",
) )
tls_verify: bool | str = Field( tls_verify: bool | str = Field(

View file

@ -294,7 +294,7 @@ class VLLMInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin, Inference, ModelsPro
self, self,
model_entries=build_hf_repo_model_entries(), model_entries=build_hf_repo_model_entries(),
litellm_provider_name="vllm", litellm_provider_name="vllm",
api_key_from_config=config.api_token.get_secret_value(), api_key_from_config=config.api_token,
provider_data_api_key_field="vllm_api_token", provider_data_api_key_field="vllm_api_token",
openai_compat_api_base=config.url, openai_compat_api_base=config.url,
) )

View file

@ -24,8 +24,8 @@ class WatsonXConfig(BaseModel):
default_factory=lambda: os.getenv("WATSONX_BASE_URL", "https://us-south.ml.cloud.ibm.com"), default_factory=lambda: os.getenv("WATSONX_BASE_URL", "https://us-south.ml.cloud.ibm.com"),
description="A base url for accessing the watsonx.ai", description="A base url for accessing the watsonx.ai",
) )
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default_factory=lambda: os.getenv("WATSONX_API_KEY"), default_factory=lambda: SecretStr(os.getenv("WATSONX_API_KEY", "")),
description="The watsonx API key", description="The watsonx API key",
) )
project_id: str | None = Field( project_id: str | None = Field(

View file

@ -12,8 +12,8 @@ from llama_stack.schema_utils import json_schema_type
class SambaNovaProviderDataValidator(BaseModel): class SambaNovaProviderDataValidator(BaseModel):
sambanova_api_key: str | None = Field( sambanova_api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="Sambanova Cloud API key", description="Sambanova Cloud API key",
) )
@ -24,8 +24,8 @@ class SambaNovaSafetyConfig(BaseModel):
default="https://api.sambanova.ai/v1", default="https://api.sambanova.ai/v1",
description="The URL for the SambaNova AI server", description="The URL for the SambaNova AI server",
) )
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The SambaNova cloud API Key", description="The SambaNova cloud API Key",
) )

View file

@ -6,13 +6,16 @@
from typing import Any from typing import Any
from pydantic import BaseModel, SecretStr from pydantic import BaseModel, Field, SecretStr
class BingSearchToolConfig(BaseModel): class BingSearchToolConfig(BaseModel):
"""Configuration for Bing Search Tool Runtime""" """Configuration for Bing Search Tool Runtime"""
api_key: SecretStr | None = None api_key: SecretStr = Field(
default=SecretStr(""),
description="The Bing API key",
)
top_k: int = 3 top_k: int = 3
@classmethod @classmethod

View file

@ -10,8 +10,8 @@ from pydantic import BaseModel, Field, SecretStr
class BraveSearchToolConfig(BaseModel): class BraveSearchToolConfig(BaseModel):
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The Brave Search API Key", description="The Brave Search API Key",
) )
max_results: int = Field( max_results: int = Field(

View file

@ -10,8 +10,8 @@ from pydantic import BaseModel, Field, SecretStr
class TavilySearchToolConfig(BaseModel): class TavilySearchToolConfig(BaseModel):
api_key: SecretStr | None = Field( api_key: SecretStr = Field(
default=None, default=SecretStr(""),
description="The Tavily Search API Key", description="The Tavily Search API Key",
) )
max_results: int = Field( max_results: int = Field(

View file

@ -21,7 +21,7 @@ class PGVectorVectorIOConfig(BaseModel):
port: int | None = Field(default=5432) port: int | None = Field(default=5432)
db: str | None = Field(default="postgres") db: str | None = Field(default="postgres")
user: str | None = Field(default="postgres") user: str | None = Field(default="postgres")
password: SecretStr | None = Field(default="mysecretpassword") password: SecretStr = Field(default=SecretStr("mysecretpassword"))
kvstore: KVStoreConfig | None = Field(description="Config for KV store backend (SQLite only for now)", default=None) kvstore: KVStoreConfig | None = Field(description="Config for KV store backend (SQLite only for now)", default=None)
@classmethod @classmethod

View file

@ -14,12 +14,12 @@ class BedrockBaseConfig(BaseModel):
default_factory=lambda: os.getenv("AWS_ACCESS_KEY_ID"), default_factory=lambda: os.getenv("AWS_ACCESS_KEY_ID"),
description="The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID", description="The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID",
) )
aws_secret_access_key: SecretStr | None = Field( aws_secret_access_key: SecretStr = Field(
default_factory=lambda: SecretStr(val) if (val := os.getenv("AWS_SECRET_ACCESS_KEY")) else None, default_factory=lambda: SecretStr(os.getenv("AWS_SECRET_ACCESS_KEY", "")),
description="The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY", description="The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY",
) )
aws_session_token: SecretStr | None = Field( aws_session_token: SecretStr = Field(
default_factory=lambda: SecretStr(val) if (val := os.getenv("AWS_SESSION_TOKEN")) else None, default_factory=lambda: SecretStr(os.getenv("AWS_SESSION_TOKEN", "")),
description="The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN", description="The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN",
) )
region_name: str | None = Field( region_name: str | None = Field(

View file

@ -69,7 +69,7 @@ class LiteLLMOpenAIMixin(
def __init__( def __init__(
self, self,
litellm_provider_name: str, litellm_provider_name: str,
api_key_from_config: SecretStr | None, api_key_from_config: SecretStr,
provider_data_api_key_field: str, provider_data_api_key_field: str,
model_entries: list[ProviderModelEntry] | None = None, model_entries: list[ProviderModelEntry] | None = None,
openai_compat_api_base: str | None = None, openai_compat_api_base: str | None = None,

View file

@ -74,7 +74,7 @@ class PostgresKVStoreConfig(CommonConfig):
port: int = 5432 port: int = 5432
db: str = "llamastack" db: str = "llamastack"
user: str user: str
password: SecretStr | None = None password: SecretStr = SecretStr("")
ssl_mode: str | None = None ssl_mode: str | None = None
ca_cert_path: str | None = None ca_cert_path: str | None = None
table_name: str = "llamastack_kvstore" table_name: str = "llamastack_kvstore"
@ -118,7 +118,7 @@ class MongoDBKVStoreConfig(CommonConfig):
port: int = 27017 port: int = 27017
db: str = "llamastack" db: str = "llamastack"
user: str | None = None user: str | None = None
password: SecretStr | None = None password: SecretStr = SecretStr("")
collection_name: str = "llamastack_kvstore" collection_name: str = "llamastack_kvstore"
@classmethod @classmethod

View file

@ -63,11 +63,11 @@ class PostgresSqlStoreConfig(SqlAlchemySqlStoreConfig):
port: int = 5432 port: int = 5432
db: str = "llamastack" db: str = "llamastack"
user: str user: str
password: SecretStr | None = None password: SecretStr = SecretStr("")
@property @property
def engine_str(self) -> str: def engine_str(self) -> str:
return f"postgresql+asyncpg://{self.user}:{self.password.get_secret_value() if self.password else ''}@{self.host}:{self.port}/{self.db}" return f"postgresql+asyncpg://{self.user}:{self.password.get_secret_value()}@{self.host}:{self.port}/{self.db}"
@classmethod @classmethod
def pip_packages(cls) -> list[str]: def pip_packages(cls) -> list[str]: