Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-03 18:00:36 +00:00)
fix: resolve CI test failures for Bedrock provider

- Fix test assertion to match actual error message
- Add missing has_model method to FakeModelStore mock
- Remove redundant comments and update docs
Parent: 454aeaaf3e
Commit: fb89143949

4 changed files with 6 additions and 8 deletions
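The has_model addition mentioned in the commit message lands in one of the other changed files and does not appear in the excerpt below. As a rough, hypothetical sketch only (the real FakeModelStore's attributes and whether its methods are async are assumptions here, not taken from the diff), the added mock method might look something like this:

class FakeModelStore:
    """Hypothetical test double for a model store (names assumed)."""

    def __init__(self, models: dict[str, object] | None = None):
        self._models = models or {}

    async def get_model(self, model_id: str):
        return self._models[model_id]

    async def has_model(self, model_id: str) -> bool:
        # Mirror a real store's lookup so provider code that checks for a
        # model before using it can run against the fake in tests.
        return model_id in self._models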
@@ -31,11 +31,8 @@ def test_client_url_construction():
 
 def test_api_key_from_config():
     """Test API key is stored as SecretStr in auth_credential"""
     config = BedrockConfig(api_key="config-key", region_name="us-east-1")
     adapter = BedrockInferenceAdapter(config=config)
 
     # API key is stored in auth_credential field (SecretStr)
     assert adapter.config.auth_credential.get_secret_value() == "config-key"
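The test above exercises the config's behavior of accepting a plain api_key and exposing it through auth_credential as a SecretStr. A minimal sketch of how such a mapping can be done with a pydantic validator follows; the class name BedrockConfigSketch and the validator are illustrative assumptions, not the actual llama-stack implementation:

from pydantic import BaseModel, SecretStr, model_validator


class BedrockConfigSketch(BaseModel):
    """Sketch of a config that hides its API key behind a SecretStr."""

    region_name: str = "us-east-1"
    auth_credential: SecretStr | None = None

    @model_validator(mode="before")
    @classmethod
    def _map_api_key(cls, values: dict) -> dict:
        # Move a plain-text api_key into the SecretStr-backed auth_credential
        # so the secret is masked in reprs and logs.
        api_key = values.pop("api_key", None)
        if api_key is not None and values.get("auth_credential") is None:
            values["auth_credential"] = api_key
        return values


config = BedrockConfigSketch(api_key="config-key", region_name="us-east-1")
assert config.auth_credential.get_secret_value() == "config-key"
print(config)  # auth_credential is rendered masked, e.g. '**********'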
@@ -75,7 +72,7 @@ async def test_authentication_error_handling():
             await adapter.openai_chat_completion(params=params)
 
         assert "AWS Bedrock authentication failed" in str(exc_info.value)
-        assert "Please check your API key" in str(exc_info.value)
+        assert "Please verify your API key" in str(exc_info.value)
     finally:
         # Restore original method
         BedrockInferenceAdapter.__bases__[0].openai_chat_completion = original_method
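The context lines in this hunk show the test's patch-and-restore structure: the parent class's openai_chat_completion is swapped out for the duration of the test and put back in finally. A self-contained sketch of that pattern, using illustrative class names and an assumed error message rather than the real adapter's API, is:

import asyncio

import pytest


class _ParentMixin:
    async def openai_chat_completion(self, params):
        return "ok"


class _FakeAdapter(_ParentMixin):
    pass


async def _run_test():
    # Keep a reference to the original method so it can be restored later.
    original_method = _FakeAdapter.__bases__[0].openai_chat_completion

    async def _raise_auth_error(self, params):
        raise RuntimeError(
            "AWS Bedrock authentication failed. Please verify your API key."
        )

    # Patch the parent class, not the instance, matching the hunk above.
    _FakeAdapter.__bases__[0].openai_chat_completion = _raise_auth_error
    try:
        with pytest.raises(RuntimeError) as exc_info:
            await _FakeAdapter().openai_chat_completion(params={})

        assert "AWS Bedrock authentication failed" in str(exc_info.value)
        assert "Please verify your API key" in str(exc_info.value)
    finally:
        # Restore the original method so other tests see the unpatched class.
        _FakeAdapter.__bases__[0].openai_chat_completion = original_method


asyncio.run(_run_test())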