diff --git a/litellm/llms/huggingface_restapi.py b/litellm/llms/huggingface_restapi.py
index cb1eb0152f..e73eba191e 100644
--- a/litellm/llms/huggingface_restapi.py
+++ b/litellm/llms/huggingface_restapi.py
@@ -52,7 +52,12 @@ def completion(
     if model in custom_prompt_dict:
         # check if the model has a registered custom prompt
         model_prompt_details = custom_prompt_dict[model]
-        prompt = custom_prompt(role_dict=model_prompt_details["roles"], pre_message_sep=model_prompt_details["pre_message_sep"], post_message_sep=model_prompt_details["post_message_sep"], messages=messages)
+        prompt = custom_prompt(
+            role_dict=model_prompt_details["roles"],
+            initial_prompt_value=model_prompt_details["pre_message_sep"],
+            final_prompt_value=model_prompt_details["post_message_sep"],
+            messages=messages
+        )
     else:
         prompt = prompt_factory(model=model, messages=messages)
     ### MAP INPUT PARAMS
diff --git a/litellm/llms/together_ai.py b/litellm/llms/together_ai.py
index d55616ef17..96baccb644 100644
--- a/litellm/llms/together_ai.py
+++ b/litellm/llms/together_ai.py
@@ -45,7 +45,12 @@ def completion(
     if model in custom_prompt_dict:
         # check if the model has a registered custom prompt
         model_prompt_details = custom_prompt_dict[model]
-        prompt = custom_prompt(role_dict=model_prompt_details["roles"], pre_message_sep=model_prompt_details["pre_message_sep"], post_message_sep=model_prompt_details["post_message_sep"], messages=messages)
+        prompt = custom_prompt(
+            role_dict=model_prompt_details["roles"],
+            initial_prompt_value=model_prompt_details["pre_message_sep"],
+            final_prompt_value=model_prompt_details["post_message_sep"],
+            messages=messages
+        )
     else:
         prompt = prompt_factory(model=model, messages=messages)
     data = {