revert: do not use MySecretStr

We don't need this class if we can simply set the secret to an empty string.

Signed-off-by: Sébastien Han <seb@redhat.com>
Sébastien Han 2025-09-26 10:33:33 +02:00
parent bc64635835
commit 2a34226727
86 changed files with 208 additions and 263 deletions
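For reference, the pattern this revert standardizes on looks roughly like the sketch below: an optional secret is a plain pydantic SecretStr that defaults to an empty string (typically pulled from an environment variable), and consumers unwrap it with get_secret_value(). The class and environment variable names are illustrative, not taken from the tree.

import os

from pydantic import BaseModel, Field, SecretStr


class ExampleAdapterConfig(BaseModel):
    # An "unset" secret is simply an empty SecretStr, so no None handling
    # (and no MySecretStr subclass) is needed.
    api_key: SecretStr = Field(
        default_factory=lambda: SecretStr(os.getenv("EXAMPLE_API_KEY", "")),
        description="The Example API key",
    )


config = ExampleAdapterConfig()
if not config.api_key.get_secret_value():
    # An empty string means "not configured"; providers decide how to react.
    print("no API key configured")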


@@ -1,21 +0,0 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from pydantic.types import SecretStr
class MySecretStr(SecretStr):
    """A SecretStr that can accept None values to avoid mypy type errors.
    This is useful for optional secret fields where you want to avoid
    explicit None checks in consuming code.
    We chose to not use the SecretStr from pydantic because it does not allow None values and will
    let the provider's library fail if the secret is not provided.
    """
    def __init__(self, secret_value: str | None = None) -> None:
        SecretStr.__init__(self, secret_value)  # type: ignore[arg-type]
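For context, the subclass above existed only so that mypy would accept a possibly-None secret value, since stock SecretStr is typed to take a str. With the empty-string convention this revert adopts, callers always hand SecretStr an actual string, so the wrapper has no remaining purpose. A minimal illustration, assuming Pydantic's standard SecretStr semantics:

import os

from pydantic import SecretStr

# Before: MySecretStr(os.getenv("SOME_KEY")) tolerated a None lookup result.
# After: fall back to "" so plain SecretStr is always given a str.
api_key = SecretStr(os.getenv("SOME_KEY", ""))
print(bool(api_key.get_secret_value()))  # False when the variable is unset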


@@ -288,6 +288,12 @@ def _convert_string_to_proper_type_with_config(value: str, path: str, provider_c
field_name = path.split(".")[-1] if "." in path else path
config_class = provider_context["config_class"]
# Only instantiate if the class hasn't been instantiated already
# This handles the case we entered replace_env_vars() with a dict, which
# could happen if we use a sample_run_config() method that returns a dict. Our unit tests do
# this on the adhoc config spec creation.
if isinstance(config_class, str):
config_class = instantiate_class_type(config_class)
if hasattr(config_class, "model_fields") and field_name in config_class.model_fields:
field_info = config_class.model_fields[field_name]
@@ -563,7 +569,9 @@ def run_config_from_adhoc_config_spec(
# call method "sample_run_config" on the provider spec config class
provider_config_type = instantiate_class_type(provider_spec.config_class)
provider_config = replace_env_vars(
provider_config_type.sample_run_config(__distro_dir__=distro_dir), provider_registry=provider_registry
provider_config_type.sample_run_config(__distro_dir__=distro_dir),
provider_registry=provider_registry,
current_provider_context=provider_spec.model_dump(),
)
provider_configs_by_api[api_str] = [
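The field-introspection idea behind the hunk above is simple: given the provider's config class, Pydantic's model_fields exposes the declared type of each field, which is enough to decide how an environment-variable string should be coerced. A minimal sketch with an illustrative config class:

from pydantic import BaseModel, SecretStr


class DemoConfig(BaseModel):
    api_key: SecretStr
    timeout: int = 30


field_info = DemoConfig.model_fields["api_key"]
print(field_info.annotation)  # <class 'pydantic.types.SecretStr'>

# A raw environment value can then be wrapped according to the annotation.
raw = "sk-from-env"
value = SecretStr(raw) if field_info.annotation is SecretStr else raw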


@@ -5,16 +5,15 @@
# the root directory of this source tree.
from typing import Any
from pydantic import BaseModel
from pydantic import BaseModel, SecretStr
from llama_stack.core.datatypes import Api
from llama_stack.core.secret_types import MySecretStr
from .config import BraintrustScoringConfig
class BraintrustProviderDataValidator(BaseModel):
openai_api_key: MySecretStr
openai_api_key: SecretStr
async def get_provider_impl(


@@ -17,7 +17,7 @@ from autoevals.ragas import (
ContextRelevancy,
Faithfulness,
)
from pydantic import BaseModel
from pydantic import BaseModel, SecretStr
from llama_stack.apis.datasetio import DatasetIO
from llama_stack.apis.datasets import Datasets
@@ -31,7 +31,6 @@ from llama_stack.apis.scoring import (
from llama_stack.apis.scoring_functions import ScoringFn, ScoringFnParams
from llama_stack.core.datatypes import Api
from llama_stack.core.request_headers import NeedsRequestProviderData
from llama_stack.core.secret_types import MySecretStr
from llama_stack.providers.datatypes import ScoringFunctionsProtocolPrivate
from llama_stack.providers.utils.common.data_schema_validator import (
get_valid_schemas,
@@ -153,7 +152,7 @@ class BraintrustScoringImpl(
raise ValueError(
'Pass OpenAI API Key in the header X-LlamaStack-Provider-Data as { "openai_api_key": <your api key>}'
)
self.config.openai_api_key = MySecretStr(provider_data.openai_api_key)
self.config.openai_api_key = SecretStr(provider_data.openai_api_key)
os.environ["OPENAI_API_KEY"] = self.config.openai_api_key.get_secret_value()
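Per the error message above, a client can supply the key per request through the X-LlamaStack-Provider-Data header as JSON; the adapter then wraps it in SecretStr and exports it for the underlying library. A hedged sketch of the caller side (URL and key are placeholders):

import json

headers = {
    "X-LlamaStack-Provider-Data": json.dumps({"openai_api_key": "sk-..."}),
}
# e.g. httpx.post(url, headers=headers, json=payload)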


@@ -5,13 +5,11 @@
# the root directory of this source tree.
from typing import Any
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
class BraintrustScoringConfig(BaseModel):
openai_api_key: MySecretStr = Field(
openai_api_key: SecretStr = Field(
description="The OpenAI API Key",
)


@@ -8,16 +8,14 @@ import os
import warnings
from typing import Any
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
class NvidiaDatasetIOConfig(BaseModel):
"""Configuration for NVIDIA DatasetIO implementation."""
api_key: MySecretStr = Field(
default_factory=lambda: MySecretStr(os.getenv("NVIDIA_API_KEY", "")),
api_key: SecretStr = Field(
default_factory=lambda: SecretStr(os.getenv("NVIDIA_API_KEY", "")),
description="The NVIDIA API key.",
)


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.providers.utils.sqlstore.sqlstore import SqliteSqlStoreConfig, SqlStoreConfig
@@ -18,7 +17,7 @@ class S3FilesImplConfig(BaseModel):
bucket_name: str = Field(description="S3 bucket name to store files")
region: str = Field(default="us-east-1", description="AWS region where the bucket is located")
aws_access_key_id: str | None = Field(default=None, description="AWS access key ID (optional if using IAM roles)")
aws_secret_access_key: MySecretStr = Field(description="AWS secret access key (optional if using IAM roles)")
aws_secret_access_key: SecretStr = Field(description="AWS secret access key (optional if using IAM roles)")
endpoint_url: str | None = Field(default=None, description="Custom S3 endpoint URL (for MinIO, LocalStack, etc.)")
auto_create_bucket: bool = Field(
default=False, description="Automatically create the S3 bucket if it doesn't exist"


@@ -28,7 +28,7 @@ class AnthropicInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
LiteLLMOpenAIMixin.__init__(
self,
litellm_provider_name="anthropic",
api_key_from_config=config.api_key.get_secret_value() if config.api_key else None,
api_key_from_config=config.api_key,
provider_data_api_key_field="anthropic_api_key",
)
self.config = config
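Because the config field is now a plain SecretStr, the adapter can pass it to the mixin untouched and leave the unwrapping to get_api_key() at the boundary. A compact sketch of that wiring (class and field names are illustrative):

from pydantic import BaseModel, Field, SecretStr


class ExampleConfig(BaseModel):
    api_key: SecretStr = Field(description="API key for the Example provider")


class ExampleAdapter:
    def __init__(self, config: ExampleConfig) -> None:
        # Hand the SecretStr through as-is; no .get_secret_value() here.
        self.api_key_from_config = config.api_key

    def get_api_key(self) -> SecretStr:
        return self.api_key_from_config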


@@ -6,21 +6,20 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class AnthropicProviderDataValidator(BaseModel):
anthropic_api_key: MySecretStr = Field(
anthropic_api_key: SecretStr = Field(
description="API key for Anthropic models",
)
@json_schema_type
class AnthropicConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="API key for Anthropic models",
)


@@ -7,14 +7,13 @@
import os
from typing import Any
from pydantic import BaseModel, Field, HttpUrl
from pydantic import BaseModel, Field, HttpUrl, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class AzureProviderDataValidator(BaseModel):
azure_api_key: MySecretStr = Field(
azure_api_key: SecretStr = Field(
description="Azure API key for Azure",
)
azure_api_base: HttpUrl = Field(
@@ -32,7 +31,7 @@ class AzureProviderDataValidator(BaseModel):
@json_schema_type
class AzureConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="Azure API key for Azure",
)
api_base: HttpUrl = Field(


@@ -7,9 +7,8 @@
import os
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
DEFAULT_BASE_URL = "https://api.cerebras.ai"
@@ -21,8 +20,8 @@ class CerebrasImplConfig(BaseModel):
default=os.environ.get("CEREBRAS_BASE_URL", DEFAULT_BASE_URL),
description="Base URL for the Cerebras API",
)
api_key: MySecretStr = Field(
default=MySecretStr(os.environ.get("CEREBRAS_API_KEY")),
api_key: SecretStr = Field(
default=SecretStr(os.environ.get("CEREBRAS_API_KEY")),
description="Cerebras API Key",
)


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
@@ -18,7 +17,7 @@ class DatabricksImplConfig(BaseModel):
default=None,
description="The URL for the Databricks model serving endpoint",
)
api_token: MySecretStr = Field(
api_token: SecretStr = Field(
description="The Databricks API token",
)


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import Field
from pydantic import Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.providers.utils.inference.model_registry import RemoteInferenceProviderConfig
from llama_stack.schema_utils import json_schema_type
@@ -19,7 +18,7 @@ class FireworksImplConfig(RemoteInferenceProviderConfig):
default="https://api.fireworks.ai/inference/v1",
description="The URL for the Fireworks server",
)
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The Fireworks.ai API Key",
)


@@ -6,21 +6,20 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class GeminiProviderDataValidator(BaseModel):
gemini_api_key: MySecretStr = Field(
gemini_api_key: SecretStr = Field(
description="API key for Gemini models",
)
@json_schema_type
class GeminiConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="API key for Gemini models",
)


@@ -20,7 +20,7 @@ class GeminiInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
LiteLLMOpenAIMixin.__init__(
self,
litellm_provider_name="gemini",
api_key_from_config=config.api_key.get_secret_value() if config.api_key else None,
api_key_from_config=config.api_key,
provider_data_api_key_field="gemini_api_key",
)
self.config = config


@@ -6,21 +6,20 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class GroqProviderDataValidator(BaseModel):
groq_api_key: MySecretStr = Field(
groq_api_key: SecretStr = Field(
description="API key for Groq models",
)
@json_schema_type
class GroqConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
# The Groq client library loads the GROQ_API_KEY environment variable by default
description="The Groq API key",
)


@@ -6,21 +6,20 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class LlamaProviderDataValidator(BaseModel):
llama_api_key: MySecretStr = Field(
llama_api_key: SecretStr = Field(
description="API key for api.llama models",
)
@json_schema_type
class LlamaCompatConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The Llama API key",
)


@@ -7,9 +7,8 @@
import os
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
@@ -40,8 +39,8 @@ class NVIDIAConfig(BaseModel):
default_factory=lambda: os.getenv("NVIDIA_BASE_URL", "https://integrate.api.nvidia.com"),
description="A base url for accessing the NVIDIA NIM",
)
api_key: MySecretStr = Field(
default_factory=lambda: MySecretStr(os.getenv("NVIDIA_API_KEY", "")),
api_key: SecretStr = Field(
default_factory=lambda: SecretStr(os.getenv("NVIDIA_API_KEY", "")),
description="The NVIDIA API key, only needed of using the hosted service",
)
timeout: int = Field(


@@ -6,21 +6,20 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class OpenAIProviderDataValidator(BaseModel):
openai_api_key: MySecretStr = Field(
openai_api_key: SecretStr = Field(
description="API key for OpenAI models",
)
@json_schema_type
class OpenAIConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="API key for OpenAI models",
)
base_url: str = Field(


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
@@ -19,7 +18,7 @@ class PassthroughImplConfig(BaseModel):
description="The URL for the passthrough endpoint",
)
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="API Key for the passthrouth endpoint",
)


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
@@ -18,7 +17,7 @@ class RunpodImplConfig(BaseModel):
default=None,
description="The URL for the Runpod model serving endpoint",
)
api_token: MySecretStr = Field(
api_token: SecretStr = Field(
description="The API token",
)


@@ -6,14 +6,13 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class SambaNovaProviderDataValidator(BaseModel):
sambanova_api_key: MySecretStr = Field(
sambanova_api_key: SecretStr = Field(
description="Sambanova Cloud API key",
)
@@ -24,7 +23,7 @@ class SambaNovaImplConfig(BaseModel):
default="https://api.sambanova.ai/v1",
description="The URL for the SambaNova AI server",
)
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The SambaNova cloud API Key",
)


@@ -29,7 +29,7 @@ class SambaNovaInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
LiteLLMOpenAIMixin.__init__(
self,
litellm_provider_name="sambanova",
api_key_from_config=self.config.api_key.get_secret_value() if self.config.api_key else None,
api_key_from_config=self.config.api_key,
provider_data_api_key_field="sambanova_api_key",
openai_compat_api_base=self.config.url,
download_images=True, # SambaNova requires base64 image encoding


@@ -5,9 +5,8 @@
# the root directory of this source tree.
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
@@ -33,7 +32,7 @@ class InferenceEndpointImplConfig(BaseModel):
endpoint_name: str = Field(
description="The name of the Hugging Face Inference Endpoint in the format of '{namespace}/{endpoint_name}' (e.g. 'my-cool-org/meta-llama-3-1-8b-instruct-rce'). Namespace is optional and will default to the user account if not provided.",
)
api_token: MySecretStr = Field(
api_token: SecretStr = Field(
description="Your Hugging Face user access token (will default to locally saved token if not provided)",
)
@@ -55,7 +54,7 @@ class InferenceAPIImplConfig(BaseModel):
huggingface_repo: str = Field(
description="The model ID of the model on the Hugging Face Hub (e.g. 'meta-llama/Meta-Llama-3.1-70B-Instruct')",
)
api_token: MySecretStr = Field(
api_token: SecretStr = Field(
description="Your Hugging Face user access token (will default to locally saved token if not provided)",
)


@@ -8,6 +8,7 @@
from collections.abc import AsyncGenerator
from huggingface_hub import AsyncInferenceClient, HfApi
from pydantic import SecretStr
from llama_stack.apis.common.content_types import (
InterleavedContent,
@@ -34,7 +35,6 @@ from llama_stack.apis.inference import (
)
from llama_stack.apis.models import Model
from llama_stack.apis.models.models import ModelType
from llama_stack.core.secret_types import MySecretStr
from llama_stack.log import get_logger
from llama_stack.models.llama.sku_list import all_registered_models
from llama_stack.providers.datatypes import ModelsProtocolPrivate
@@ -79,7 +79,7 @@ class _HfAdapter(
ModelsProtocolPrivate,
):
url: str
api_key: MySecretStr
api_key: SecretStr
hf_client: AsyncInferenceClient
max_tokens: int
@@ -337,7 +337,7 @@ class TGIAdapter(_HfAdapter):
self.max_tokens = endpoint_info["max_total_tokens"]
self.model_id = endpoint_info["model_id"]
self.url = f"{config.url.rstrip('/')}/v1"
self.api_key = MySecretStr("NO_KEY")
self.api_key = SecretStr("NO_KEY")
class InferenceAPIAdapter(_HfAdapter):


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import Field
from pydantic import Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.providers.utils.inference.model_registry import RemoteInferenceProviderConfig
from llama_stack.schema_utils import json_schema_type
@@ -19,7 +18,7 @@ class TogetherImplConfig(RemoteInferenceProviderConfig):
default="https://api.together.xyz/v1",
description="The URL for the Together AI server",
)
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The Together AI API Key",
)


@@ -8,9 +8,9 @@ from typing import Any
import google.auth.transport.requests
from google.auth import default
from pydantic import SecretStr
from llama_stack.apis.inference import ChatCompletionRequest
from llama_stack.core.secret_types import MySecretStr
from llama_stack.providers.utils.inference.litellm_openai_mixin import (
LiteLLMOpenAIMixin,
)
@@ -24,12 +24,12 @@ class VertexAIInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
LiteLLMOpenAIMixin.__init__(
self,
litellm_provider_name="vertex_ai",
api_key_from_config=MySecretStr(None), # Vertex AI uses ADC, not API keys
api_key_from_config=SecretStr(""), # Vertex AI uses ADC, not API keys
provider_data_api_key_field="vertex_project", # Use project for validation
)
self.config = config
def get_api_key(self) -> MySecretStr:
def get_api_key(self) -> SecretStr:
"""
Get an access token for Vertex AI using Application Default Credentials.
@@ -40,11 +40,11 @@ class VertexAIInferenceAdapter(OpenAIMixin, LiteLLMOpenAIMixin):
# Get default credentials - will read from GOOGLE_APPLICATION_CREDENTIALS
credentials, _ = default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
credentials.refresh(google.auth.transport.requests.Request())
return MySecretStr(credentials.token)
return SecretStr(credentials.token)
except Exception:
# If we can't get credentials, return empty string to let LiteLLM handle it
# This allows the LiteLLM mixin to work with ADC directly
return MySecretStr("")
return SecretStr("")
def get_base_url(self) -> str:
"""


@@ -4,15 +4,13 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
from .config import VLLMInferenceAdapterConfig
class VLLMProviderDataValidator(BaseModel):
vllm_api_token: MySecretStr = Field(
vllm_api_token: SecretStr = Field(
description="API token for vLLM models",
)


@@ -6,9 +6,8 @@
from pathlib import Path
from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, Field, SecretStr, field_validator
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
@@ -22,7 +21,8 @@ class VLLMInferenceAdapterConfig(BaseModel):
default=4096,
description="Maximum number of tokens to generate.",
)
api_token: MySecretStr = Field(
api_token: SecretStr = Field(
default=SecretStr("fake"),
description="The API token",
)
tls_verify: bool | str = Field(


@@ -7,9 +7,8 @@
import os
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
@@ -25,8 +24,8 @@ class WatsonXConfig(BaseModel):
default_factory=lambda: os.getenv("WATSONX_BASE_URL", "https://us-south.ml.cloud.ibm.com"),
description="A base url for accessing the watsonx.ai",
)
api_key: MySecretStr = Field(
default_factory=lambda: MySecretStr(os.getenv("WATSONX_API_KEY", "")),
api_key: SecretStr = Field(
default_factory=lambda: SecretStr(os.getenv("WATSONX_API_KEY", "")),
description="The watsonx API key",
)
project_id: str | None = Field(


@@ -7,9 +7,7 @@
import os
from typing import Any
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
# TODO: add default values for all fields
@@ -17,8 +15,8 @@ from llama_stack.core.secret_types import MySecretStr
class NvidiaPostTrainingConfig(BaseModel):
"""Configuration for NVIDIA Post Training implementation."""
api_key: MySecretStr = Field(
default_factory=lambda: MySecretStr(os.getenv("NVIDIA_API_KEY", "")),
api_key: SecretStr = Field(
default_factory=lambda: SecretStr(os.getenv("NVIDIA_API_KEY", "")),
description="The NVIDIA API key.",
)


@@ -6,14 +6,13 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.schema_utils import json_schema_type
class SambaNovaProviderDataValidator(BaseModel):
sambanova_api_key: MySecretStr = Field(
sambanova_api_key: SecretStr = Field(
description="Sambanova Cloud API key",
)
@@ -24,7 +23,7 @@ class SambaNovaSafetyConfig(BaseModel):
default="https://api.sambanova.ai/v1",
description="The URL for the SambaNova AI server",
)
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The SambaNova cloud API Key",
)


@@ -6,15 +6,13 @@
from typing import Any
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
class BingSearchToolConfig(BaseModel):
"""Configuration for Bing Search Tool Runtime"""
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The Bing API key",
)
top_k: int = 3


@@ -6,13 +6,11 @@
from typing import Any
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
class BraveSearchToolConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The Brave Search API Key",
)
max_results: int = Field(


@@ -6,13 +6,11 @@
from typing import Any
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
class TavilySearchToolConfig(BaseModel):
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The Tavily Search API Key",
)
max_results: int = Field(


@@ -6,15 +6,13 @@
from typing import Any
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
class WolframAlphaToolConfig(BaseModel):
"""Configuration for WolframAlpha Tool Runtime"""
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The WolframAlpha API Key",
)


@@ -6,7 +6,7 @@
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, ConfigDict, Field, SecretStr
from llama_stack.providers.utils.kvstore.config import KVStoreConfig, SqliteKVStoreConfig
from llama_stack.schema_utils import json_schema_type
@@ -15,7 +15,7 @@ from llama_stack.schema_utils import json_schema_type
@json_schema_type
class MilvusVectorIOConfig(BaseModel):
uri: str = Field(description="The URI of the Milvus server")
token: str | None = Field(description="The token of the Milvus server")
token: SecretStr = Field(description="The token of the Milvus server")
consistency_level: str = Field(description="The consistency level of the Milvus server", default="Strong")
kvstore: KVStoreConfig = Field(description="Config for KV store backend")


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.providers.utils.kvstore.config import (
KVStoreConfig,
SqliteKVStoreConfig,
@@ -22,7 +21,7 @@ class PGVectorVectorIOConfig(BaseModel):
port: int | None = Field(default=5432)
db: str | None = Field(default="postgres")
user: str | None = Field(default="postgres")
password: MySecretStr = Field(default=MySecretStr("mysecretpassword"))
password: SecretStr = Field(default=SecretStr("mysecretpassword"))
kvstore: KVStoreConfig | None = Field(description="Config for KV store backend (SQLite only for now)", default=None)
@classmethod


@@ -6,9 +6,8 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.providers.utils.kvstore.config import (
KVStoreConfig,
SqliteKVStoreConfig,
@@ -24,7 +23,7 @@ class QdrantVectorIOConfig(BaseModel):
grpc_port: int = 6334
prefer_grpc: bool = False
https: bool | None = None
api_key: MySecretStr = Field(
api_key: SecretStr = Field(
description="The API key for the Qdrant instance",
)
prefix: str | None = None


@@ -173,7 +173,7 @@ class QdrantVectorIOAdapter(OpenAIVectorStoreMixin, VectorIO, VectorDBsProtocolP
self._qdrant_lock = asyncio.Lock()
async def initialize(self) -> None:
client_config = self.config.model_dump(exclude_none=True, exclude={"kvstore"})
client_config = self.config.model_dump(exclude_none=True, exclude={"kvstore"}, mode="json")
self.client = AsyncQdrantClient(**client_config)
self.kvstore = await kvstore_impl(self.config.kvstore)


@@ -6,9 +6,7 @@
import os
from pydantic import BaseModel, Field
from llama_stack.core.secret_types import MySecretStr
from pydantic import BaseModel, Field, SecretStr
class BedrockBaseConfig(BaseModel):
@@ -16,12 +14,12 @@ class BedrockBaseConfig(BaseModel):
default_factory=lambda: os.getenv("AWS_ACCESS_KEY_ID"),
description="The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID",
)
aws_secret_access_key: MySecretStr = Field(
default_factory=lambda: MySecretStr(os.getenv("AWS_SECRET_ACCESS_KEY", "")),
aws_secret_access_key: SecretStr = Field(
default_factory=lambda: SecretStr(os.getenv("AWS_SECRET_ACCESS_KEY", "")),
description="The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY",
)
aws_session_token: MySecretStr = Field(
default_factory=lambda: MySecretStr(os.getenv("AWS_SESSION_TOKEN", "")),
aws_session_token: SecretStr = Field(
default_factory=lambda: SecretStr(os.getenv("AWS_SESSION_TOKEN", "")),
description="The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN",
)
region_name: str | None = Field(


@@ -8,6 +8,7 @@ from collections.abc import AsyncGenerator, AsyncIterator
from typing import Any
import litellm
from pydantic import SecretStr
from llama_stack.apis.common.content_types import (
InterleavedContent,
@@ -39,7 +40,6 @@ from llama_stack.apis.inference import (
ToolPromptFormat,
)
from llama_stack.core.request_headers import NeedsRequestProviderData
from llama_stack.core.secret_types import MySecretStr
from llama_stack.log import get_logger
from llama_stack.providers.utils.inference.model_registry import ModelRegistryHelper, ProviderModelEntry
from llama_stack.providers.utils.inference.openai_compat import (
@@ -69,7 +69,7 @@ class LiteLLMOpenAIMixin(
def __init__(
self,
litellm_provider_name: str,
api_key_from_config: MySecretStr,
api_key_from_config: SecretStr,
provider_data_api_key_field: str,
model_entries: list[ProviderModelEntry] | None = None,
openai_compat_api_base: str | None = None,
@@ -255,7 +255,7 @@ class LiteLLMOpenAIMixin(
**get_sampling_options(request.sampling_params),
}
def get_api_key(self) -> MySecretStr:
def get_api_key(self) -> SecretStr:
provider_data = self.get_request_provider_data()
key_field = self.provider_data_api_key_field
if provider_data and getattr(provider_data, key_field, None):
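The method above (truncated by the diff context) implements a simple precedence: a per-request key from provider data wins over the key configured on the adapter. A hedged sketch of that logic in isolation:

from pydantic import SecretStr


def resolve_api_key(provider_data, key_field: str, config_key: SecretStr) -> SecretStr:
    # Prefer the key supplied with the request, if any; otherwise use the config key.
    request_key = getattr(provider_data, key_field, None) if provider_data else None
    return SecretStr(request_key) if request_key else config_key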


@@ -11,6 +11,7 @@ from collections.abc import AsyncIterator
from typing import Any
from openai import NOT_GIVEN, AsyncOpenAI
from pydantic import SecretStr
from llama_stack.apis.inference import (
Model,
@@ -24,7 +25,6 @@ from llama_stack.apis.inference import (
OpenAIResponseFormatParam,
)
from llama_stack.apis.models import ModelType
from llama_stack.core.secret_types import MySecretStr
from llama_stack.log import get_logger
from llama_stack.providers.utils.inference.model_registry import ModelRegistryHelper
from llama_stack.providers.utils.inference.openai_compat import prepare_openai_completion_params
@@ -71,14 +71,14 @@ class OpenAIMixin(ModelRegistryHelper, ABC):
allowed_models: list[str] = []
@abstractmethod
def get_api_key(self) -> MySecretStr:
def get_api_key(self) -> SecretStr:
"""
Get the API key.
This method must be implemented by child classes to provide the API key
for authenticating with the OpenAI API or compatible endpoints.
:return: The API key as a MySecretStr
:return: The API key as a SecretStr
"""
pass
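On the consuming side, whatever implements get_api_key() eventually gets unwrapped when a client is constructed. A sketch of that step, assuming an adapter that exposes get_api_key() and get_base_url():

from openai import AsyncOpenAI


def build_client(adapter) -> AsyncOpenAI:
    # Unwrap the SecretStr only at the boundary where the raw key is required.
    return AsyncOpenAI(
        api_key=adapter.get_api_key().get_secret_value(),
        base_url=adapter.get_base_url(),
    )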


@@ -8,9 +8,8 @@ import re
from enum import Enum
from typing import Annotated, Literal
from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, Field, SecretStr, field_validator
from llama_stack.core.secret_types import MySecretStr
from llama_stack.core.utils.config_dirs import RUNTIME_BASE_DIR
@@ -75,7 +74,7 @@ class PostgresKVStoreConfig(CommonConfig):
port: int = 5432
db: str = "llamastack"
user: str
password: MySecretStr = MySecretStr("")
password: SecretStr = SecretStr("")
ssl_mode: str | None = None
ca_cert_path: str | None = None
table_name: str = "llamastack_kvstore"
@@ -119,7 +118,7 @@ class MongoDBKVStoreConfig(CommonConfig):
port: int = 27017
db: str = "llamastack"
user: str | None = None
password: MySecretStr = MySecretStr("")
password: SecretStr = SecretStr("")
collection_name: str = "llamastack_kvstore"
@classmethod


@@ -9,9 +9,8 @@ from enum import StrEnum
from pathlib import Path
from typing import Annotated, Literal
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, SecretStr
from llama_stack.core.secret_types import MySecretStr
from llama_stack.core.utils.config_dirs import RUNTIME_BASE_DIR
from .api import SqlStore
@@ -64,7 +63,7 @@ class PostgresSqlStoreConfig(SqlAlchemySqlStoreConfig):
port: int = 5432
db: str = "llamastack"
user: str
password: MySecretStr = MySecretStr("")
password: SecretStr = SecretStr("")
@property
def engine_str(self) -> str:
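Since the Postgres configs above now default the password to SecretStr(""), anything that builds a connection string must unwrap it explicitly. A rough sketch of what such an engine string might look like (the asyncpg driver prefix and the standalone helper are assumptions, not read from the tree):

from pydantic import SecretStr


def build_engine_str(host: str, port: int, db: str, user: str, password: SecretStr) -> str:
    # get_secret_value() is the only place the raw password appears.
    return f"postgresql+asyncpg://{user}:{password.get_secret_value()}@{host}:{port}/{db}"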