doc cleanup for custom prompt templates

This commit is contained in:
Krrish Dholakia 2023-09-12 12:16:44 -07:00
parent e6d48d91ce
commit e80457013b
6 changed files with 40 additions and 21 deletions

View file

@ -43,7 +43,7 @@ def default_pt(messages):
# Create your own custom prompt template works # Create your own custom prompt template works
litellm.register_prompt_template( litellm.register_prompt_template(
model="togethercomputer/LLaMA-2-7B-32K", model="togethercomputer/LLaMA-2-7B-32K",
role_dict={ roles={
"system": { "system": {
"pre_message": "[INST] <<SYS>>\n", "pre_message": "[INST] <<SYS>>\n",
"post_message": "\n<</SYS>>\n [/INST]\n" "post_message": "\n<</SYS>>\n [/INST]\n"
@ -53,12 +53,9 @@ litellm.register_prompt_template(
"post_message": " [/INST]\n" "post_message": " [/INST]\n"
}, },
"assistant": { "assistant": {
"pre_message": "\n", "post_message": "\n"
"post_message": "\n", }
} }
} # tell LiteLLM how you want to map the openai messages to this model
pre_message_sep= "\n",
post_message_sep= "\n"
) )
def test_huggingface_custom_model(): def test_huggingface_custom_model():

View file

@ -76,9 +76,20 @@ import litellm
litellm.register_prompt_template( litellm.register_prompt_template(
model="OpenAssistant/llama2-70b-oasst-sft-v10", model="OpenAssistant/llama2-70b-oasst-sft-v10",
roles={"system":"<|im_start|>system", "assistant":"<|im_start|>assistant", "user":"<|im_start|>user"}, # tell LiteLLM how you want to map the openai messages to this model roles={
pre_message_sep= "\n", "system": {
post_message_sep= "\n" "pre_message": "<|im_start|>system",
"post_message": "\n"
},
"user": {
"pre_message": "<|im_start|>user",
"post_message": "\n"
},
"assistant": {
"pre_message": "<|im_start|>assistant",
"post_message": "\n"
}
}
) )
``` ```
@ -106,9 +117,20 @@ os.environ["TOGETHERAI_API_KEY"] = ""
litellm.register_prompt_template( litellm.register_prompt_template(
model="OpenAssistant/llama2-70b-oasst-sft-v10", model="OpenAssistant/llama2-70b-oasst-sft-v10",
roles={"system":"<|im_start|>system", "assistant":"<|im_start|>assistant", "user":"<|im_start|>user"}, # tell LiteLLM how you want to map the openai messages to this model roles={
pre_message_sep= "\n", "system": {
post_message_sep= "\n" "pre_message": "<|im_start|>system",
"post_message": "\n"
},
"user": {
"pre_message": "<|im_start|>user",
"post_message": "\n"
},
"assistant": {
"pre_message": "<|im_start|>assistant",
"post_message": "\n"
}
}
) )
messages=[{"role":"user", "content": "Write me a poem about the blue sky"}] messages=[{"role":"user", "content": "Write me a poem about the blue sky"}]

View file

@ -95,7 +95,7 @@ def default_pt(messages):
# Create your own custom prompt template works # Create your own custom prompt template works
litellm.register_prompt_template( litellm.register_prompt_template(
model="togethercomputer/LLaMA-2-7B-32K", model="togethercomputer/LLaMA-2-7B-32K",
role_dict={ roles={
"system": { "system": {
"pre_message": "[INST] <<SYS>>\n", "pre_message": "[INST] <<SYS>>\n",
"post_message": "\n<</SYS>>\n [/INST]\n" "post_message": "\n<</SYS>>\n [/INST]\n"

View file

@ -1538,7 +1538,7 @@ def register_prompt_template(model: str, roles: dict, initial_prompt_value: str
"assistant": { "assistant": {
"post_message": "\n" # follows this - https://replicate.com/blog/how-to-prompt-llama "post_message": "\n" # follows this - https://replicate.com/blog/how-to-prompt-llama
} }
}, }
) )
``` ```
""" """