From efdd60014d9a0adc3999b37cdbb4d3f931d19a14 Mon Sep 17 00:00:00 2001
From: Yuan Tang
Date: Thu, 13 Feb 2025 13:44:57 -0500
Subject: [PATCH] test: Enable logprobs top_k tests for remote::vllm (#1080)

Support for top_k was added in
https://github.com/meta-llama/llama-stack/pull/1074, so these tests
should be enabled as well.

Verified that the tests pass for remote::vllm:

```
LLAMA_STACK_BASE_URL=http://localhost:5003 pytest -v tests/client-sdk/inference/test_text_inference.py -k "test_completion_log_probs_non_streaming or test_completion_log_probs_streaming"
================================================================ test session starts ================================================================
platform linux -- Python 3.10.16, pytest-8.3.4, pluggy-1.5.0 -- /home/yutang/.conda/envs/distribution-myenv/bin/python3.10
cachedir: .pytest_cache
rootdir: /home/yutang/repos/llama-stack
configfile: pyproject.toml
plugins: anyio-4.8.0
collected 14 items / 12 deselected / 2 selected

tests/client-sdk/inference/test_text_inference.py::test_completion_log_probs_non_streaming[meta-llama/Llama-3.1-8B-Instruct] PASSED [ 50%]
tests/client-sdk/inference/test_text_inference.py::test_completion_log_probs_streaming[meta-llama/Llama-3.1-8B-Instruct] PASSED [100%]

=================================================== 2 passed, 12 deselected, 1 warning in 10.03s ====================================================
```

Signed-off-by: Yuan Tang
---
 tests/client-sdk/inference/test_text_inference.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/tests/client-sdk/inference/test_text_inference.py b/tests/client-sdk/inference/test_text_inference.py
index 206629602..c931ca255 100644
--- a/tests/client-sdk/inference/test_text_inference.py
+++ b/tests/client-sdk/inference/test_text_inference.py
@@ -14,13 +14,7 @@ PROVIDER_TOOL_PROMPT_FORMAT = {
     "remote::vllm": "json",
 }
 
-PROVIDER_LOGPROBS_TOP_K = set(
-    {
-        "remote::together",
-        "remote::fireworks",
-        # "remote:vllm"
-    }
-)
+PROVIDER_LOGPROBS_TOP_K = {"remote::together", "remote::fireworks", "remote::vllm"}
 
 
 @pytest.fixture(scope="session")
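
For context on how a provider gate like `PROVIDER_LOGPROBS_TOP_K` is typically consulted, here is a minimal sketch (not part of the patch); the `provider_type` argument and the helper name are illustrative and do not correspond to the repository's actual fixtures:

```python
import pytest

# Providers that support the logprobs top_k parameter, mirroring the set in the diff above.
PROVIDER_LOGPROBS_TOP_K = {"remote::together", "remote::fireworks", "remote::vllm"}


def skip_if_logprobs_unsupported(provider_type: str) -> None:
    """Skip the current test when the active provider cannot return top_k logprobs.

    `provider_type` is assumed to come from the stack's provider listing; this
    helper is hypothetical and only illustrates the gating pattern.
    """
    if provider_type not in PROVIDER_LOGPROBS_TOP_K:
        pytest.skip(f"logprobs top_k is not supported by {provider_type}")
```

With this change, `remote::vllm` is in the set, so a gate of this kind no longer skips the two logprobs tests shown in the verification output.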