Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-05 02:17:31 +00:00)
fix: resolve CI test failures for Bedrock provider
- Fix test assertion to match actual error message
- Add missing has_model method to FakeModelStore mock
- Remove redundant comments and update docs
parent 454aeaaf3e
commit fb89143949

4 changed files with 6 additions and 8 deletions
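For context on the first bullet, the usual pytest pattern is to assert against the message the code actually raises so the test does not drift out of sync with the implementation. This is only an illustrative sketch; the real exception type and message text from the Bedrock tests are not shown on this page and the names below are placeholders:

import pytest


# Placeholder stand-in for the real adapter call; the actual exception type
# and message in the Bedrock provider tests may differ.
def resolve_model(model_id):
    raise ValueError(f"Model {model_id} not found")


def test_error_message_matches():
    # Match a substring of the message the code actually raises, rather than
    # a stale expected string.
    with pytest.raises(ValueError, match="not found"):
        resolve_model("meta.llama3-1-8b-instruct-v1:0")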
@@ -38,6 +38,9 @@ async def test_basic_chat_completion():
     adapter = BedrockInferenceAdapter(config=config)

     class FakeModelStore:
+        async def has_model(self, model_id):
+            return True
+
         async def get_model(self, model_id):
             return SimpleNamespace(provider_resource_id="meta.llama3-1-8b-instruct-v1:0")
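To show how a mock like this is typically exercised, here is a minimal sketch of wiring it into the adapter. The import paths, the BedrockConfig constructor, and the model_store attribute name are assumptions based on common llama-stack adapter conventions, not taken verbatim from the changed test file:

from types import SimpleNamespace

# Assumed import paths; the actual module layout may differ.
from llama_stack.providers.remote.inference.bedrock import BedrockInferenceAdapter
from llama_stack.providers.remote.inference.bedrock.config import BedrockConfig


class FakeModelStore:
    async def has_model(self, model_id):
        # Report every model as registered so lookups in the adapter succeed.
        return True

    async def get_model(self, model_id):
        # Only provider_resource_id (the Bedrock model ID) is needed here.
        return SimpleNamespace(provider_resource_id="meta.llama3-1-8b-instruct-v1:0")


async def test_basic_chat_completion():
    config = BedrockConfig()  # assumed: region/credentials come from the environment
    adapter = BedrockInferenceAdapter(config=config)
    adapter.model_store = FakeModelStore()  # assumed attribute name
    # ...issue a chat completion against the adapter and assert on the response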