Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-24 18:24:20 +00:00)
fix(anthropic.py): fix anthropic prompt

commit a6968d06e6
parent ba754a07a3

2 changed files with 4 additions and 4 deletions
@@ -8,8 +8,8 @@ from litellm.utils import ModelResponse
 import litellm

 class AnthropicConstants(Enum):
-    HUMAN_PROMPT = "\n\nHuman:"
-    AI_PROMPT = "\n\nAssistant:"
+    HUMAN_PROMPT = "\n\nHuman: "
+    AI_PROMPT = "\n\nAssistant: "

 class AnthropicError(Exception):
     def __init__(self, status_code, message):
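The change adds a trailing space after the "Human:" and "Assistant:" role tags, so text concatenated directly after each tag is separated from it. A minimal sketch of how such constants are typically stitched into an Anthropic-style completion prompt; the build_prompt helper and the messages shape below are illustrative, not taken from this diff:

from enum import Enum

class AnthropicConstants(Enum):
    HUMAN_PROMPT = "\n\nHuman: "      # trailing space added by this commit
    AI_PROMPT = "\n\nAssistant: "

# Illustrative helper (assumption, not part of the diff): concatenate
# chat-style messages into a classic Anthropic completion prompt.
def build_prompt(messages):
    prompt = ""
    for message in messages:
        if message["role"] == "user":
            prompt += f"{AnthropicConstants.HUMAN_PROMPT.value}{message['content']}"
        else:
            prompt += f"{AnthropicConstants.AI_PROMPT.value}{message['content']}"
    # End with the assistant tag so the model completes the assistant turn.
    prompt += AnthropicConstants.AI_PROMPT.value
    return prompt

print(repr(build_prompt([{"role": "user", "content": "Hello"}])))
# '\n\nHuman: Hello\n\nAssistant: '

Without the trailing space, the user text would be glued to the tag as "Human:Hello"; with it, the prompt reads "Human: Hello".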
@@ -164,8 +164,8 @@ class AmazonAI21Config():
             and v is not None}

 class AnthropicConstants(Enum):
-    HUMAN_PROMPT = "\n\nHuman:"
-    AI_PROMPT = "\n\nAssistant:"
+    HUMAN_PROMPT = "\n\nHuman: "
+    AI_PROMPT = "\n\nAssistant: "


 def init_bedrock_client(