From adb83e0465f03c75f6512d17cb8153d00ea33089 Mon Sep 17 00:00:00 2001
From: Yuan Tang
Date: Sat, 8 Feb 2025 23:18:39 -0500
Subject: [PATCH] test: Use JSON tool prompt format for remote::vllm provider

Signed-off-by: Yuan Tang
---
 tests/client-sdk/inference/test_text_inference.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/client-sdk/inference/test_text_inference.py b/tests/client-sdk/inference/test_text_inference.py
index aa0e510dd..81b476218 100644
--- a/tests/client-sdk/inference/test_text_inference.py
+++ b/tests/client-sdk/inference/test_text_inference.py
@@ -11,6 +11,7 @@ PROVIDER_TOOL_PROMPT_FORMAT = {
     "remote::ollama": "json",
     "remote::together": "json",
     "remote::fireworks": "json",
+    "remote::vllm": "json",
 }
 
 PROVIDER_LOGPROBS_TOP_K = set(
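
For context, PROVIDER_TOOL_PROMPT_FORMAT maps a provider type to the tool prompt format the test suite should request for it; this patch adds "remote::vllm" so vLLM-backed runs also use the JSON format. Below is a minimal sketch of how such a mapping could be consumed. The helper name get_tool_prompt_format and the inference_provider_type parameter are illustrative assumptions, not helpers taken from the patched file.

# Sketch only: one plausible way to look up a per-provider tool prompt format.
# Names here (get_tool_prompt_format, inference_provider_type) are hypothetical.
PROVIDER_TOOL_PROMPT_FORMAT = {
    "remote::ollama": "json",
    "remote::together": "json",
    "remote::fireworks": "json",
    "remote::vllm": "json",  # line added by this patch
}


def get_tool_prompt_format(inference_provider_type: str):
    # Return the provider's tool prompt format, or None to fall back to the default.
    return PROVIDER_TOOL_PROMPT_FORMAT.get(inference_provider_type)


# Example usage: a remote::vllm provider now resolves to the JSON format.
assert get_tool_prompt_format("remote::vllm") == "json"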