Mirror of https://github.com/meta-llama/llama-stack.git
fix: resolve CI test failures for Bedrock provider
- Fix test assertion to match actual error message
- Add missing has_model method to FakeModelStore mock
- Remove redundant comments and update docs
parent 454aeaaf3e
commit fb89143949

4 changed files with 6 additions and 8 deletions
```diff
@@ -19,7 +19,7 @@ AWS Bedrock safety provider for content moderation using AWS's safety services.
 | `aws_access_key_id` | `str \| None` | No | | The AWS access key to use. Default use environment variable: AWS_ACCESS_KEY_ID |
 | `aws_secret_access_key` | `str \| None` | No | | The AWS secret access key to use. Default use environment variable: AWS_SECRET_ACCESS_KEY |
 | `aws_session_token` | `str \| None` | No | | The AWS session token to use. Default use environment variable: AWS_SESSION_TOKEN |
-| `region_name` | `str \| None` | No | us-east-2 | The default AWS Region to use, for example, us-west-1 or us-west-2. Default use environment variable: AWS_DEFAULT_REGION |
+| `region_name` | `str \| None` | No | | The default AWS Region to use, for example, us-west-1 or us-west-2. Default use environment variable: AWS_DEFAULT_REGION |
 | `profile_name` | `str \| None` | No | | The profile name that contains credentials to use. Default use environment variable: AWS_PROFILE |
 | `total_max_attempts` | `int \| None` | No | | An integer representing the maximum number of attempts that will be made for a single request, including the initial attempt. Default use environment variable: AWS_MAX_ATTEMPTS |
 | `retry_mode` | `str \| None` | No | | A string representing the type of retries Boto3 will perform. Default use environment variable: AWS_RETRY_MODE |
```
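For readers of the config table above: every field falls back to an AWS environment variable when left unset. The sketch below only illustrates that lookup order, using the field/variable pairs listed in the table; `resolve_bedrock_setting` is an illustrative helper, not a llama-stack function.

```python
# Illustrative only: mirrors the "default to environment variable" behaviour
# documented in the table above; resolve_bedrock_setting is not part of llama-stack.
import os


def resolve_bedrock_setting(configured: str | None, env_var: str) -> str | None:
    """Prefer an explicitly configured value, else fall back to the environment variable."""
    return configured if configured is not None else os.environ.get(env_var)


region = resolve_bedrock_setting(None, "AWS_DEFAULT_REGION")         # region_name
access_key = resolve_bedrock_setting(None, "AWS_ACCESS_KEY_ID")      # aws_access_key_id
retry_mode = resolve_bedrock_setting("standard", "AWS_RETRY_MODE")   # an explicit value wins
```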
```diff
@@ -95,9 +95,8 @@ class BedrockInferenceAdapter(OpenAIMixin):
             result = await super().openai_chat_completion(params=params)
-            logger.debug(f"Bedrock API returned: {type(result).__name__ if result is not None else 'None'}")

             # Defensive check for unexpected None response
             if result is None:
-                logger.error(f"OpenAI client returned None for model={params.model}, stream={params.stream}")
+                logger.error(f"Bedrock OpenAI client returned None for model={params.model}, stream={params.stream}")
                 raise RuntimeError(
                     f"Bedrock API returned no response for model '{params.model}'. "
                     "This may indicate the model is not supported or a network/API issue occurred."
@@ -105,7 +104,6 @@ class BedrockInferenceAdapter(OpenAIMixin):

             return result
         except AuthenticationError as e:
-            # Extract detailed error message from the exception
             error_msg = str(e)

             # Check if this is a token expiration error
```
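The defensive check above turns a silent `None` from the Bedrock-backed client into a `RuntimeError`, and the `AuthenticationError` branch re-raises with an "AWS Bedrock authentication failed" message (asserted by the tests further down). A minimal, hedged sketch of how calling code might react to those two failure modes; `chat_or_explain` and its print statements are illustrative and not part of this commit, and `adapter`/`params` are assumed to be a configured adapter and an OpenAI-style request as in the unit tests.

```python
# Illustrative caller-side handling of the failure modes established above.
# `adapter` is assumed to be a configured BedrockInferenceAdapter and `params`
# an OpenAI-style chat completion request, as in the unit tests in this commit.
async def chat_or_explain(adapter, params):
    try:
        return await adapter.openai_chat_completion(params=params)
    except RuntimeError as err:
        # Raised when the underlying client unexpectedly returns None
        # (unsupported model, or a network/API issue).
        print(f"Bedrock returned no response: {err}")
        raise
    except Exception as err:
        # Authentication failures are re-raised by the adapter with a
        # descriptive message; surface a credentials hint and propagate.
        if "AWS Bedrock authentication failed" in str(err):
            print("Check the configured api_key or AWS credentials, then retry.")
        raise
```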
```diff
@@ -31,11 +31,8 @@ def test_client_url_construction():


 def test_api_key_from_config():
     """Test API key is stored as SecretStr in auth_credential"""
     config = BedrockConfig(api_key="config-key", region_name="us-east-1")
     adapter = BedrockInferenceAdapter(config=config)

-    # API key is stored in auth_credential field (SecretStr)
     assert adapter.config.auth_credential.get_secret_value() == "config-key"

```
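The assertion above goes through `get_secret_value()` because, per the test's docstring, the credential is stored as a pydantic `SecretStr`, which masks its contents everywhere else. A standalone illustration of that behaviour, independent of llama-stack:

```python
# Standalone pydantic example showing why get_secret_value() is needed.
from pydantic import SecretStr

credential = SecretStr("config-key")
assert "config-key" not in str(credential)             # str()/repr() are masked
assert credential.get_secret_value() == "config-key"   # explicit unwrap of the secret
```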
```diff
@@ -75,7 +72,7 @@ async def test_authentication_error_handling():
             await adapter.openai_chat_completion(params=params)

         assert "AWS Bedrock authentication failed" in str(exc_info.value)
-        assert "Please check your API key" in str(exc_info.value)
+        assert "Please verify your API key" in str(exc_info.value)
     finally:
         # Restore original method
         BedrockInferenceAdapter.__bases__[0].openai_chat_completion = original_method
```
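`test_authentication_error_handling` monkeypatches the parent class's `openai_chat_completion` and restores it in a `finally` block. The self-contained sketch below shows the same patch-and-restore idea with `unittest.mock.patch.object`, which restores automatically; `Base` and `ToyAdapter` are stand-ins for `OpenAIMixin` and `BedrockInferenceAdapter`, and the error message simply mirrors the assertions above.

```python
# Self-contained sketch of the patch-and-restore pattern used by the test above,
# written with unittest.mock.patch.object so the original method comes back
# automatically. Base/ToyAdapter are stand-ins, not the real llama-stack classes.
import asyncio
from unittest.mock import patch


class Base:
    async def openai_chat_completion(self, params):
        return "real response"


class ToyAdapter(Base):
    async def openai_chat_completion(self, params):
        try:
            return await super().openai_chat_completion(params=params)
        except Exception as e:
            # Toy translation of a low-level auth failure into a descriptive error.
            raise RuntimeError(
                f"AWS Bedrock authentication failed: {e}. Please verify your API key."
            ) from e


async def failing_completion(self, params):
    raise Exception("invalid credentials")


async def main():
    adapter = ToyAdapter()
    # Patch the parent-class method only for the duration of the block.
    with patch.object(Base, "openai_chat_completion", failing_completion):
        try:
            await adapter.openai_chat_completion(params=None)
        except RuntimeError as exc:
            assert "AWS Bedrock authentication failed" in str(exc)
            assert "Please verify your API key" in str(exc)
    # Outside the block, the original Base method is restored untouched.


asyncio.run(main())
```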
```diff
@@ -38,6 +38,9 @@ async def test_basic_chat_completion():
     adapter = BedrockInferenceAdapter(config=config)

     class FakeModelStore:
+        async def has_model(self, model_id):
+            return True
+
         async def get_model(self, model_id):
             return SimpleNamespace(provider_resource_id="meta.llama3-1-8b-instruct-v1:0")
```
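The new `has_model` method keeps the mock in step with the adapter code, which (per the commit message) now asks its model store whether a model is registered before resolving it with `get_model`. The sketch below only exercises that contract on the fake store; wiring it in via an `adapter.model_store` attribute is an assumption based on the mixin convention and is not shown in this diff.

```python
# Sketch of the contract the adapter expects from a model store: has_model()
# answers "is this model registered?", get_model() resolves it to a provider
# resource id. Wiring via `adapter.model_store = FakeModelStore()` is assumed.
import asyncio
from types import SimpleNamespace


class FakeModelStore:
    async def has_model(self, model_id):
        return True  # pretend every model is registered

    async def get_model(self, model_id):
        return SimpleNamespace(provider_resource_id="meta.llama3-1-8b-instruct-v1:0")


async def main():
    store = FakeModelStore()
    assert await store.has_model("llama3-1-8b")
    model = await store.get_model("llama3-1-8b")
    print(model.provider_resource_id)  # meta.llama3-1-8b-instruct-v1:0


asyncio.run(main())
```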