mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
adding support for custom prompt templates to together ai
This commit is contained in:
parent
090ec35a4d
commit
cc59328d32
3 changed files with 118 additions and 28 deletions
|
@ -396,7 +396,21 @@ def test_completion_together_ai():
|
|||
except Exception as e:
|
||||
pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
# Script-style invocation: run the together_ai completion test when this module executes.
test_completion_together_ai()
|
||||
|
||||
def test_customprompt_together_ai():
    """Register a custom prompt template for an OASST model, then run a completion with it.

    Any exception raised during template registration or the Together AI call
    fails the test via ``pytest.fail``.
    """
    try:
        # How OpenAI-style roles map onto this model's prompt markers
        # (ChatML-like <|im_start|> tags — presumably matching the model's
        # training format; confirm against the model card).
        role_markers = {
            "system": "<|im_start|>system",
            "assistant": "<|im_start|>assistant",
            "user": "<|im_start|>user",
        }
        litellm.register_prompt_template(
            model="OpenAssistant/llama2-70b-oasst-sft-v10",
            roles=role_markers,
            pre_message_sep="\n",
            post_message_sep="\n",
        )
        response = completion(
            model="together_ai/OpenAssistant/llama2-70b-oasst-sft-v10",
            messages=messages,
        )
        print(response)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
# Script-style invocation: run the custom-prompt-template test when this module executes.
test_customprompt_together_ai()
|
||||
|
||||
def test_completion_sagemaker():
|
||||
try:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue