fix(anthropic.py): handle whitespace characters for anthropic calls

Krrish Dholakia 2024-05-03 17:31:34 -07:00
parent 0b9fa53e3e
commit 097714e02f
3 changed files with 26 additions and 34 deletions
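The diff below moves Anthropic's OpenAI-parameter mapping into litellm.AnthropicConfig; per the commit title, that is where whitespace in stop sequences gets handled (the anthropic.py changes themselves are not shown in this hunk). As a rough, illustrative sketch of such a guard -- the helper name and exact behavior here are assumptions, not this commit's code -- whitespace-only stop sequences are filtered out before a request is built, since Anthropic's API rejects them:

# Illustrative sketch only -- NOT the code from this commit. It shows the kind
# of guard the commit title describes: stop sequences that are empty or pure
# whitespace (e.g. "\n") are dropped before being sent to Anthropic.
from typing import List, Optional, Union


def _clean_stop_sequences(stop: Union[str, List[str]]) -> Optional[List[str]]:
    # OpenAI accepts str or list for "stop"; normalize to a list first.
    if isinstance(stop, str):
        stop = [stop]
    cleaned = [s for s in stop if s.strip() != ""]
    # Return None when nothing survives, so the key can be omitted entirely.
    return cleaned or None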

@@ -5006,26 +5006,9 @@ def get_optional_params(
             model=model, custom_llm_provider=custom_llm_provider
         )
         _check_valid_arg(supported_params=supported_params)
-        # handle anthropic params
-        if stream:
-            optional_params["stream"] = stream
-        if stop is not None:
-            if type(stop) == str:
-                stop = [stop]  # openai can accept str/list for stop
-            optional_params["stop_sequences"] = stop
-        if temperature is not None:
-            optional_params["temperature"] = temperature
-        if top_p is not None:
-            optional_params["top_p"] = top_p
-        if max_tokens is not None:
-            if (model == "claude-2") or (model == "claude-instant-1"):
-                # these models use antropic_text.py which only accepts max_tokens_to_sample
-                optional_params["max_tokens_to_sample"] = max_tokens
-            else:
-                optional_params["max_tokens"] = max_tokens
-            optional_params["max_tokens"] = max_tokens
-        if tools is not None:
-            optional_params["tools"] = tools
+        optional_params = litellm.AnthropicConfig().map_openai_params(
+            non_default_params=non_default_params, optional_params=optional_params
+        )
     elif custom_llm_provider == "cohere":
         ## check if unsupported param passed in
         supported_params = get_supported_openai_params(
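For reference, a minimal usage sketch of the consolidated call added above, assuming map_openai_params mirrors the inline logic it replaces (the keyword signature is taken straight from the diff; the example values are made up):

import litellm

# Sketch: the per-param branches removed above now live in AnthropicConfig.
non_default_params = {"temperature": 0.2, "stop": "###", "max_tokens": 256}
mapped = litellm.AnthropicConfig().map_openai_params(
    non_default_params=non_default_params,
    optional_params={},
)
# Judging by the removed inline code, "stop" should come back under
# Anthropic's "stop_sequences" key, with the other values passed through.
print(mapped)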
@@ -5929,15 +5912,7 @@ def get_supported_openai_params(model: str, custom_llm_provider: str):
     elif custom_llm_provider == "ollama_chat":
         return litellm.OllamaChatConfig().get_supported_openai_params()
     elif custom_llm_provider == "anthropic":
-        return [
-            "stream",
-            "stop",
-            "temperature",
-            "top_p",
-            "max_tokens",
-            "tools",
-            "tool_choice",
-        ]
+        return litellm.AnthropicConfig().get_supported_openai_params()
     elif custom_llm_provider == "groq":
         return [
             "temperature",