revert: do not use MySecretStr

We don't need this if we can set it to an empty string.

Signed-off-by: Sébastien Han <seb@redhat.com>
Author: Sébastien Han, 2025-09-26 10:33:33 +02:00
parent bc64635835
commit 2a34226727
86 changed files with 208 additions and 263 deletions
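
To illustrate the point in the commit message, here is a small hypothetical snippet (not taken from this repo): pydantic's own SecretStr accepts an empty string, so no custom wrapper type is needed to represent an unset token. The config class and token value below are made up for illustration.

# Hypothetical illustration only; names and defaults are assumptions, not repo code.
from pydantic import BaseModel, Field, SecretStr

class ExampleConfig(BaseModel):
    api_token: SecretStr = Field(
        default=SecretStr(""),
        description="Access token; empty means 'fall back to the locally saved token'",
    )

cfg = ExampleConfig()
assert cfg.api_token.get_secret_value() == ""  # an empty token is representable
print(str(cfg.api_token))                      # prints '' (nothing to mask, nothing leaked)

cfg = ExampleConfig(api_token=SecretStr("hf_example_token"))
print(str(cfg.api_token))                      # prints '**********' (real values stay masked)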


@@ -5,9 +5,8 @@
 # the root directory of this source tree.
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, SecretStr
-from llama_stack.core.secret_types import MySecretStr
 from llama_stack.schema_utils import json_schema_type
@@ -33,7 +32,7 @@ class InferenceEndpointImplConfig(BaseModel):
     endpoint_name: str = Field(
         description="The name of the Hugging Face Inference Endpoint in the format of '{namespace}/{endpoint_name}' (e.g. 'my-cool-org/meta-llama-3-1-8b-instruct-rce'). Namespace is optional and will default to the user account if not provided.",
     )
-    api_token: MySecretStr = Field(
+    api_token: SecretStr = Field(
         description="Your Hugging Face user access token (will default to locally saved token if not provided)",
     )
@@ -55,7 +54,7 @@ class InferenceAPIImplConfig(BaseModel):
     huggingface_repo: str = Field(
         description="The model ID of the model on the Hugging Face Hub (e.g. 'meta-llama/Meta-Llama-3.1-70B-Instruct')",
     )
-    api_token: MySecretStr = Field(
+    api_token: SecretStr = Field(
         description="Your Hugging Face user access token (will default to locally saved token if not provided)",
     )


@@ -8,6 +8,7 @@
 from collections.abc import AsyncGenerator
 from huggingface_hub import AsyncInferenceClient, HfApi
+from pydantic import SecretStr
 from llama_stack.apis.common.content_types import (
     InterleavedContent,
@@ -34,7 +35,6 @@ from llama_stack.apis.inference import (
 )
 from llama_stack.apis.models import Model
 from llama_stack.apis.models.models import ModelType
-from llama_stack.core.secret_types import MySecretStr
 from llama_stack.log import get_logger
 from llama_stack.models.llama.sku_list import all_registered_models
 from llama_stack.providers.datatypes import ModelsProtocolPrivate
@@ -79,7 +79,7 @@ class _HfAdapter(
     ModelsProtocolPrivate,
 ):
     url: str
-    api_key: MySecretStr
+    api_key: SecretStr
     hf_client: AsyncInferenceClient
     max_tokens: int
@@ -337,7 +337,7 @@ class TGIAdapter(_HfAdapter):
         self.max_tokens = endpoint_info["max_total_tokens"]
         self.model_id = endpoint_info["model_id"]
         self.url = f"{config.url.rstrip('/')}/v1"
-        self.api_key = MySecretStr("NO_KEY")
+        self.api_key = SecretStr("NO_KEY")
 class InferenceAPIAdapter(_HfAdapter):
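
For readers unfamiliar with SecretStr, the wrapped value is only exposed through get_secret_value(). Below is a hypothetical sketch of how such an api_key could be handed to the Hugging Face client; the endpoint URL and wiring are assumptions for illustration, not this adapter's actual initialization code.

# Hypothetical sketch; endpoint URL and wiring are assumed for illustration.
from huggingface_hub import AsyncInferenceClient
from pydantic import SecretStr

api_key = SecretStr("NO_KEY")  # sentinel for deployments that require no real key
print(str(api_key))            # prints '**********'; str()/repr() never leak the value
client = AsyncInferenceClient(
    model="http://localhost:8080",        # assumed local TGI endpoint
    token=api_key.get_secret_value(),     # unwrap only at the call site
)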