vllm unit test, check for exception on error

Repository: https://github.com/meta-llama/llama-stack.git (mirror)
Commit: 519afdde5f (parent: c436269cd4)
1 changed file with 2 additions and 2 deletions
@@ -107,8 +107,8 @@ async def test_register_model_checks_vllm(mock_openai_models_list, vllm_inference_adapter):
     foo_model = Model(identifier="foo", provider_resource_id="foo", provider_id="vllm-inference")
 
-    await vllm_inference_adapter.register_model(foo_model)
-    mock_openai_models_list.assert_called()
+    with pytest.raises(ValueError):
+        await vllm_inference_adapter.register_model(foo_model)
 
 
 @pytest.mark.asyncio
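
For context, a minimal sketch of the test body after this change. The import path for Model and the behavior of the mock_openai_models_list and vllm_inference_adapter fixtures are assumed from the surrounding test module (they are not part of this hunk); the point is that register_model is now expected to raise ValueError when the requested model ID is not served by the vLLM instance, instead of the test only asserting that the model-list endpoint was called.

    import pytest

    from llama_stack.apis.models import Model  # assumed import path


    @pytest.mark.asyncio
    async def test_register_model_checks_vllm(mock_openai_models_list, vllm_inference_adapter):
        # "foo" is not among the models the mocked vLLM /v1/models endpoint reports,
        # so registering it should fail loudly rather than silently succeed.
        foo_model = Model(identifier="foo", provider_resource_id="foo", provider_id="vllm-inference")

        with pytest.raises(ValueError):
            await vllm_inference_adapter.register_model(foo_model)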