fix bug: registered custom prompt templates are never applied to the vllm provider

author Bincheng Li 2024-03-16 15:35:54 +08:00
parent 6eac5c4f0a
commit e605b04927


@@ -1780,9 +1780,11 @@ def completion(
             ## RESPONSE OBJECT
             response = response
         elif custom_llm_provider == "vllm":
+            custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict
             model_response = vllm.completion(
                 model=model,
                 messages=messages,
+                custom_prompt_dict=custom_prompt_dict,
                 model_response=model_response,
                 print_verbose=print_verbose,
                 optional_params=optional_params,
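
For context, the code path this fixes is exercised when a caller registers a custom prompt template and then routes a request through the vllm provider. Below is a minimal sketch of that flow using litellm's documented register_prompt_template API; the model name and prompt markers are illustrative, and running it assumes a local vLLM installation with that model available.

    import litellm

    # Registering a template populates litellm.custom_prompt_dict, keyed by model name.
    litellm.register_prompt_template(
        model="meta-llama/Llama-2-7b-chat",
        roles={
            "system": {"pre_message": "[INST] <<SYS>>\n", "post_message": "\n<</SYS>>\n [/INST]\n"},
            "user": {"pre_message": "[INST] ", "post_message": " [/INST]\n"},
            "assistant": {"pre_message": "\n", "post_message": "\n"},
        },
    )

    # Before this fix, completion() never forwarded custom_prompt_dict to
    # vllm.completion(), so the template registered above was silently ignored
    # and the vllm provider fell back to its default prompt formatting.
    response = litellm.completion(
        model="vllm/meta-llama/Llama-2-7b-chat",
        messages=[{"role": "user", "content": "What is the capital of France?"}],
    )
    print(response.choices[0].message.content)

Note the resolution order in the added line: a custom_prompt_dict passed directly to completion() takes precedence, and the globally registered litellm.custom_prompt_dict is used as the fallback, mirroring how the other providers in completion() already resolve it.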