From 794f90dbb7717a3b06b96af5ed9a09f4a8a1630e Mon Sep 17 00:00:00 2001 From: Bincheng Li <123504381@qq.com> Date: Sat, 16 Mar 2024 15:35:54 +0800 Subject: [PATCH] fix bug: registered custom prompt templates are never applied to the vllm provider --- litellm/main.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/litellm/main.py b/litellm/main.py index b20858d89d..38976bcd36 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -1780,9 +1780,11 @@ def completion( ## RESPONSE OBJECT response = response elif custom_llm_provider == "vllm": + custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict model_response = vllm.completion( model=model, messages=messages, + custom_prompt_dict=custom_prompt_dict, model_response=model_response, print_verbose=print_verbose, optional_params=optional_params,