doc cleanup for custom prompt templates

This commit is contained in:
Krrish Dholakia 2023-09-12 12:16:44 -07:00
parent e6d48d91ce
commit e80457013b
6 changed files with 40 additions and 21 deletions

View file

@@ -42,24 +42,21 @@ def default_pt(messages):
```python ```python
# Create your own custom prompt template works # Create your own custom prompt template works
litellm.register_prompt_template( litellm.register_prompt_template(
model="togethercomputer/LLaMA-2-7B-32K", model="togethercomputer/LLaMA-2-7B-32K",
role_dict={ roles={
"system": { "system": {
"pre_message": "[INST] <<SYS>>\n", "pre_message": "[INST] <<SYS>>\n",
"post_message": "\n<</SYS>>\n [/INST]\n" "post_message": "\n<</SYS>>\n [/INST]\n"
}, },
"user": { "user": {
"pre_message": "[INST] ", "pre_message": "[INST] ",
"post_message": " [/INST]\n" "post_message": " [/INST]\n"
}, },
"assistant": { "assistant": {
"pre_message": "\n", "post_message": "\n"
"post_message": "\n",
} }
} # tell LiteLLM how you want to map the openai messages to this model }
pre_message_sep= "\n", )
post_message_sep= "\n"
)
def test_huggingface_custom_model(): def test_huggingface_custom_model():
model = "huggingface/togethercomputer/LLaMA-2-7B-32K" model = "huggingface/togethercomputer/LLaMA-2-7B-32K"

View file

@@ -75,11 +75,22 @@ Let's register our custom prompt template: [Implementation Code](https://github.
import litellm import litellm
litellm.register_prompt_template( litellm.register_prompt_template(
model="OpenAssistant/llama2-70b-oasst-sft-v10", model="OpenAssistant/llama2-70b-oasst-sft-v10",
roles={"system":"<|im_start|>system", "assistant":"<|im_start|>assistant", "user":"<|im_start|>user"}, # tell LiteLLM how you want to map the openai messages to this model roles={
pre_message_sep= "\n", "system": {
post_message_sep= "\n" "pre_message": "<|im_start|>system",
) "post_message": "\n"
},
"user": {
"pre_message": "<|im_start|>user",
"post_message": "\n"
},
"assistant": {
"pre_message": "<|im_start|>assistant",
"post_message": "\n"
}
}
)
``` ```
Let's use it! Let's use it!
@@ -105,11 +116,22 @@ from litellm import completion
os.environ["TOGETHERAI_API_KEY"] = "" os.environ["TOGETHERAI_API_KEY"] = ""
litellm.register_prompt_template( litellm.register_prompt_template(
model="OpenAssistant/llama2-70b-oasst-sft-v10", model="OpenAssistant/llama2-70b-oasst-sft-v10",
roles={"system":"<|im_start|>system", "assistant":"<|im_start|>assistant", "user":"<|im_start|>user"}, # tell LiteLLM how you want to map the openai messages to this model roles={
pre_message_sep= "\n", "system": {
post_message_sep= "\n" "pre_message": "<|im_start|>system",
) "post_message": "\n"
},
"user": {
"pre_message": "<|im_start|>user",
"post_message": "\n"
},
"assistant": {
"pre_message": "<|im_start|>assistant",
"post_message": "\n"
}
}
)
messages=[{"role":"user", "content": "Write me a poem about the blue sky"}] messages=[{"role":"user", "content": "Write me a poem about the blue sky"}]

View file

@@ -95,7 +95,7 @@ def default_pt(messages):
# Create your own custom prompt template works # Create your own custom prompt template works
litellm.register_prompt_template( litellm.register_prompt_template(
model="togethercomputer/LLaMA-2-7B-32K", model="togethercomputer/LLaMA-2-7B-32K",
role_dict={ roles={
"system": { "system": {
"pre_message": "[INST] <<SYS>>\n", "pre_message": "[INST] <<SYS>>\n",
"post_message": "\n<</SYS>>\n [/INST]\n" "post_message": "\n<</SYS>>\n [/INST]\n"

View file

@@ -1538,7 +1538,7 @@ def register_prompt_template(model: str, roles: dict, initial_prompt_value: str
"assistant": { "assistant": {
"post_message": "\n" # follows this - https://replicate.com/blog/how-to-prompt-llama "post_message": "\n" # follows this - https://replicate.com/blog/how-to-prompt-llama
} }
}, }
) )
``` ```
""" """