Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
fix(factory.py): fix prompt mapping
This commit is contained in:
parent: 0c11827a53
commit: 091cea1acb
1 changed file with 1 addition and 1 deletion
@@ -829,7 +829,7 @@ def prompt_factory(
     if custom_llm_provider == "ollama":
         return ollama_pt(model=model, messages=messages)
     elif custom_llm_provider == "anthropic":
-        if model == "claude-instant-1" or model == "claude-2.1":
+        if model == "claude-instant-1" or model == "claude-2":
             return anthropic_pt(messages=messages)
         return anthropic_messages_pt(messages=messages)
     elif custom_llm_provider == "together_ai":
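
For context, below is a minimal, self-contained Python sketch of the routing this hunk adjusts. It is not litellm's implementation: legacy_text_prompt() and messages_payload() are hypothetical stand-ins that only approximate what litellm's anthropic_pt() and anthropic_messages_pt() return, and route_anthropic_prompt() mirrors the condition as it reads after the fix.

# Self-contained sketch of the anthropic prompt-mapping branch after this commit.
# The two formatter functions are hypothetical stand-ins, not litellm code.

def legacy_text_prompt(messages: list) -> str:
    # Stand-in for anthropic_pt: flatten chat messages into a legacy
    # "\n\nHuman: ... \n\nAssistant:" completion-style prompt string.
    prompt = ""
    for m in messages:
        role = "Human" if m["role"] in ("user", "system") else "Assistant"
        prompt += f"\n\n{role}: {m['content']}"
    return prompt + "\n\nAssistant:"

def messages_payload(messages: list) -> list:
    # Stand-in for anthropic_messages_pt: keep the structured message list
    # expected by the newer Anthropic Messages API.
    return [{"role": m["role"], "content": m["content"]} for m in messages]

def route_anthropic_prompt(model: str, messages: list):
    # After the fix, only claude-instant-1 and claude-2 get the legacy
    # completion-style prompt; claude-2.1 and newer models fall through
    # to the messages-style payload.
    if model == "claude-instant-1" or model == "claude-2":
        return legacy_text_prompt(messages)
    return messages_payload(messages)

if __name__ == "__main__":
    msgs = [{"role": "user", "content": "Hello"}]
    print(route_anthropic_prompt("claude-2", msgs))    # legacy prompt string
    print(route_anthropic_prompt("claude-2.1", msgs))  # structured message list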