Adding support for vLLM

This commit is contained in:
Krrish Dholakia 2023-09-06 18:07:42 -07:00
parent 9abefa18b8
commit 4cfcabd919
17 changed files with 163 additions and 35 deletions

View file

@@ -54,8 +54,8 @@ def completion(
             model_prompt_details = custom_prompt_dict[model]
             prompt = custom_prompt(
                 role_dict=model_prompt_details["roles"],
-                initial_prompt_value=model_prompt_details["pre_message_sep"],
-                final_prompt_value=model_prompt_details["post_message_sep"],
+                initial_prompt_value=model_prompt_details["initial_prompt_value"],
+                final_prompt_value=model_prompt_details["final_prompt_value"],
                 messages=messages
             )
         else: