test: Enable test_text_chat_completion_with_tool_choice_required for remote::vllm

Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
Author: Yuan Tang
Date: 2025-02-18 23:29:16 -05:00
Parent: 8de7cf103b
Commit: eb3541c9fb


@@ -250,8 +250,6 @@ def test_text_chat_completion_with_tool_calling_and_streaming(
 def test_text_chat_completion_with_tool_choice_required(
     llama_stack_client, text_model_id, get_weather_tool_definition, provider_tool_format, inference_provider_type
 ):
-    if inference_provider_type == "remote::vllm":
-        pytest.xfail("vllm-project/vllm#13002")
     response = llama_stack_client.inference.chat_completion(
         model_id=text_model_id,
         messages=[