fix bug: registered custom prompt templates are never applied to the vllm provider

Bincheng Li 2024-03-16 15:35:54 +08:00
parent 6eac5c4f0a
commit e605b04927


@@ -1780,9 +1780,11 @@ def completion(
             ## RESPONSE OBJECT
             response = response
         elif custom_llm_provider == "vllm":
+            custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict
             model_response = vllm.completion(
                 model=model,
                 messages=messages,
+                custom_prompt_dict=custom_prompt_dict,
                 model_response=model_response,
                 print_verbose=print_verbose,
                 optional_params=optional_params,
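
For context, a minimal sketch of the behavior this change enables: litellm.register_prompt_template() populates litellm.custom_prompt_dict, and completion() now forwards that dict into vllm.completion() so the registered template is used when rendering the prompt. The model name and role markers below are illustrative assumptions, not values taken from the commit.

```python
import litellm

# Register a chat template for a hypothetical vLLM-hosted model.
# This populates litellm.custom_prompt_dict, which completion()
# now passes through to the vllm provider after this fix.
litellm.register_prompt_template(
    model="meta-llama/Llama-2-7b-chat-hf",  # illustrative model key
    roles={
        "system": {"pre_message": "[INST] <<SYS>>\n", "post_message": "\n<</SYS>>\n"},
        "user": {"pre_message": "", "post_message": " [/INST]"},
        "assistant": {"pre_message": "", "post_message": "</s>"},
    },
)

# With the fix, this call renders the prompt using the registered template
# instead of silently ignoring it.
response = litellm.completion(
    model="vllm/meta-llama/Llama-2-7b-chat-hf",
    messages=[{"role": "user", "content": "Hello, how are you?"}],
)
print(response)
```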