mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 04:04:14 +00:00
fix: prevent telemetry from leaking sensitive info
Prevent sensitive information from being logged in telemetry output by assigning the SecretStr type to sensitive fields. API keys and passwords from the KV store are now covered. All providers have been converted. Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
parent
8dc9fd6844
commit
c4cb6aa8d9
53 changed files with 121 additions and 109 deletions
|
@ -33,7 +33,7 @@ def test_groq_provider_openai_client_caching():
|
|||
with request_provider_data_context(
|
||||
{"x-llamastack-provider-data": json.dumps({inference_adapter.provider_data_api_key_field: api_key})}
|
||||
):
|
||||
assert inference_adapter.client.api_key == api_key
|
||||
assert inference_adapter.client.api_key.get_secret_value() == api_key
|
||||
|
||||
|
||||
def test_openai_provider_openai_client_caching():
|
||||
|
@ -52,7 +52,7 @@ def test_openai_provider_openai_client_caching():
|
|||
{"x-llamastack-provider-data": json.dumps({inference_adapter.provider_data_api_key_field: api_key})}
|
||||
):
|
||||
openai_client = inference_adapter.client
|
||||
assert openai_client.api_key == api_key
|
||||
assert openai_client.api_key.get_secret_value() == api_key
|
||||
|
||||
|
||||
def test_together_provider_openai_client_caching():
|
||||
|
@ -86,4 +86,4 @@ def test_llama_compat_provider_openai_client_caching():
|
|||
|
||||
for api_key in ["test1", "test2"]:
|
||||
with request_provider_data_context({"x-llamastack-provider-data": json.dumps({"llama_api_key": api_key})}):
|
||||
assert inference_adapter.client.api_key == api_key
|
||||
assert inference_adapter.client.api_key.get_secret_value() == api_key
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue