Merge branch 'main' into litellm_maintain_Claude2_support

Ishaan Jaff, 2024-03-04 21:14:28 -08:00, committed by GitHub
commit f1c39f65d7
Signature: no known key found in database (GPG key ID: B5690EEEBB952194)
24 changed files with 490 additions and 61 deletions


@@ -200,6 +200,10 @@ def map_finish_reason(
         return "content_filter"
     elif finish_reason == "STOP":  # vertex ai
         return "stop"
+    elif finish_reason == "end_turn" or finish_reason == "stop_sequence":  # anthropic
+        return "stop"
+    elif finish_reason == "max_tokens":  # anthropic
+        return "length"
     return finish_reason
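
This first hunk teaches map_finish_reason about Anthropic's native stop reasons, normalizing them to the OpenAI-style values the rest of litellm expects. A minimal sketch of the resulting behavior (the table and function names below are distilled from the diff for illustration, not copied from litellm):

    # Illustrative distillation of the new branches above; not litellm code.
    ANTHROPIC_TO_OPENAI_FINISH = {
        "end_turn": "stop",       # model finished its turn normally
        "stop_sequence": "stop",  # a configured stop sequence fired
        "max_tokens": "length",   # generation hit the token limit
    }

    def normalize(finish_reason: str) -> str:
        # Unmapped values fall through unchanged, as in map_finish_reason.
        return ANTHROPIC_TO_OPENAI_FINISH.get(finish_reason, finish_reason)

    assert normalize("end_turn") == "stop"
    assert normalize("max_tokens") == "length"
    assert normalize("unknown") == "unknown"  # pass-through for other providers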
@@ -4106,6 +4110,7 @@ def get_optional_params(
         and custom_llm_provider != "anyscale"
         and custom_llm_provider != "together_ai"
         and custom_llm_provider != "mistral"
+        and custom_llm_provider != "anthropic"
     ):
         if custom_llm_provider == "ollama" or custom_llm_provider == "ollama_chat":
             # ollama actually supports json output
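
Only part of the surrounding condition is visible in this hunk, but the pattern is a chain of `!=` guards: providers named in the chain skip the generic handling below (which, per the context comment, covers cases like ollama's JSON output), while every other provider falls into it. Adding anthropic to the chain opts it out now that it gets dedicated handling. A rough sketch of the pattern, with an assumed and partial provider set (the real chain has more entries than the hunk shows):

    # Hypothetical condensation of the "and custom_llm_provider != ..." chain.
    OPTED_OUT = {"anyscale", "together_ai", "mistral", "anthropic"}  # partial list

    def enters_generic_block(custom_llm_provider: str) -> bool:
        return custom_llm_provider not in OPTED_OUT

    assert not enters_generic_block("anthropic")  # excluded by this commit
    assert enters_generic_block("ollama")         # still handled inside the block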
@@ -4186,7 +4191,15 @@
     ## raise exception if provider doesn't support passed in param
     if custom_llm_provider == "anthropic":
         ## check if unsupported param passed in
-        supported_params = ["stream", "stop", "temperature", "top_p", "max_tokens"]
+        supported_params = [
+            "stream",
+            "stop",
+            "temperature",
+            "top_p",
+            "max_tokens",
+            "tools",
+            "tool_choice",
+        ]
         _check_valid_arg(supported_params=supported_params)
         # handle anthropic params
         if stream:
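
The whitelist now admits "tools" and "tool_choice", so OpenAI-style tool arguments reach the Anthropic branch instead of tripping the unsupported-param check. A hypothetical stand-in for what this kind of whitelist validation does (the real _check_valid_arg lives in litellm/utils.py and raises litellm's own exception type; this sketch is not its actual implementation):

    def check_valid_arg(passed_params, supported_params):
        # Reject anything the provider's whitelist does not name.
        unsupported = [k for k in passed_params if k not in supported_params]
        if unsupported:
            raise ValueError(f"anthropic does not support parameters: {unsupported}")

    supported = ["stream", "stop", "temperature", "top_p",
                 "max_tokens", "tools", "tool_choice"]
    check_valid_arg({"tools": [], "temperature": 0.7}, supported)  # passes after this commit
    try:
        check_valid_arg({"presence_penalty": 0.5}, supported)      # still rejected
    except ValueError as err:
        print(err)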
@@ -4205,6 +4218,9 @@
                 optional_params["max_tokens_to_sample"] = max_tokens
             else:
                 optional_params["max_tokens"] = max_tokens
+            optional_params["max_tokens"] = max_tokens
+        if tools is not None:
+            optional_params["tools"] = tools
     elif custom_llm_provider == "cohere":
         ## check if unsupported param passed in
         supported_params = [
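
Taken together, the hunks let OpenAI-format tool definitions flow through litellm.completion to Anthropic models, with the response's finish reason normalized by the first hunk. A hedged end-to-end sketch (the model name and tool schema are illustrative, not taken from this commit; requires an ANTHROPIC_API_KEY in the environment):

    import litellm

    # Example tool in the OpenAI function-calling format litellm accepts;
    # the name and schema are invented for this example.
    tools = [{
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather for a location",
            "parameters": {
                "type": "object",
                "properties": {"location": {"type": "string"}},
                "required": ["location"],
            },
        },
    }]

    response = litellm.completion(
        model="claude-3-opus-20240229",  # assumed model name, not from the diff
        messages=[{"role": "user", "content": "What's the weather in Boston?"}],
        tools=tools,
    )
    # end_turn / stop_sequence / max_tokens come back as "stop" or "length"
    print(response.choices[0].finish_reason)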