test: Enable test_text_chat_completion_with_tool_choice_required for remote::vllm (#1148)

This commit is contained in:
Yuan Tang 2025-02-18 23:52:15 -05:00 committed by GitHub
parent 8de7cf103b
commit a66b4c4c81
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -250,8 +250,6 @@ def test_text_chat_completion_with_tool_calling_and_streaming(
def test_text_chat_completion_with_tool_choice_required(
llama_stack_client, text_model_id, get_weather_tool_definition, provider_tool_format, inference_provider_type
):
if inference_provider_type == "remote::vllm":
pytest.xfail("vllm-project/vllm#13002")
response = llama_stack_client.inference.chat_completion(
model_id=text_model_id,
messages=[