Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 19:54:13 +00:00
fix bug: registered custom prompt templates are never applied to the vllm provider
This commit is contained in:
parent 6aa9276253
commit 794f90dbb7
1 changed file with 2 additions and 0 deletions
@@ -1780,9 +1780,11 @@ def completion(
             ## RESPONSE OBJECT
             response = response
         elif custom_llm_provider == "vllm":
+            custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict
             model_response = vllm.completion(
                 model=model,
                 messages=messages,
+                custom_prompt_dict=custom_prompt_dict,
                 model_response=model_response,
                 print_verbose=print_verbose,
                 optional_params=optional_params,
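For context, a minimal sketch of the user-facing code path this commit fixes, assuming litellm's public `register_prompt_template` API; the model name and role markers below are illustrative placeholders, not part of the commit:

```python
import litellm

# Registering a template populates litellm.custom_prompt_dict.
# Before this fix, the vllm branch of completion() never read that
# dict, so the template below was silently ignored for vllm models.
litellm.register_prompt_template(
    model="meta-llama/Llama-2-7b-chat",  # illustrative model name
    roles={
        "system": {"pre_message": "[INST] <<SYS>>\n", "post_message": "\n<</SYS>>\n [/INST]\n"},
        "user": {"pre_message": "[INST] ", "post_message": " [/INST]\n"},
        "assistant": {"pre_message": "", "post_message": "\n"},
    },
)

# With the fix, the registered template is forwarded to vllm.completion()
# via the new custom_prompt_dict argument and applied when the prompt is built.
response = litellm.completion(
    model="vllm/meta-llama/Llama-2-7b-chat",
    messages=[{"role": "user", "content": "Hello, how are you?"}],
)
```

The `custom_prompt_dict or litellm.custom_prompt_dict` fallback lets a per-call dict take precedence while defaulting to the globally registered templates.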