Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
fix bug: custom prompt templates registered are never applied to vllm provider
This commit is contained in:
parent 6eac5c4f0a
commit e605b04927

1 changed file with 2 additions and 0 deletions
@@ -1780,9 +1780,11 @@ def completion(
         ## RESPONSE OBJECT
         response = response
     elif custom_llm_provider == "vllm":
+        custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict
         model_response = vllm.completion(
             model=model,
             messages=messages,
+            custom_prompt_dict=custom_prompt_dict,
             model_response=model_response,
             print_verbose=print_verbose,
             optional_params=optional_params,
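With this change, a prompt template registered globally is finally picked up by the vllm branch: completion() now falls back to litellm.custom_prompt_dict when the caller passes no per-call custom_prompt_dict, and forwards the result to vllm.completion(). A minimal sketch of the flow this fixes, using litellm's documented register_prompt_template API (the model name and role markers below are illustrative, not prescribed by the commit):

import litellm

# Register a custom chat template. This populates litellm.custom_prompt_dict,
# which the vllm branch above now falls back to when no per-call dict is given.
litellm.register_prompt_template(
    model="meta-llama/Llama-2-7b-chat-hf",
    roles={
        "system": {"pre_message": "[INST] <<SYS>>\n", "post_message": "\n<</SYS>>\n"},
        "user": {"pre_message": "", "post_message": " [/INST]"},
        "assistant": {"pre_message": "", "post_message": "</s>"},
    },
    initial_prompt_value="<s>",
    final_prompt_value="",
)

# Before this commit, the registered template was silently ignored for vllm:
# completion() never forwarded litellm.custom_prompt_dict to vllm.completion().
response = litellm.completion(
    model="vllm/meta-llama/Llama-2-7b-chat-hf",
    messages=[
        {"role": "system", "content": "You are a concise assistant."},
        {"role": "user", "content": "Summarize unified diffs in one sentence."},
    ],
)
print(response)

The `or` fallback keeps per-call dicts authoritative: an explicit custom_prompt_dict argument still overrides the global registry.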