diff --git a/litellm/llms/anthropic.py b/litellm/llms/anthropic.py
index 377235dee..3c130aafc 100644
--- a/litellm/llms/anthropic.py
+++ b/litellm/llms/anthropic.py
@@ -101,13 +101,13 @@ class AnthropicConfig:
                 optional_params["max_tokens"] = value
             if param == "tools":
                 optional_params["tools"] = value
-            if param == "stream":
+            if param == "stream" and value == True:
                 optional_params["stream"] = value
             if param == "stop":
                 if isinstance(value, str):
                     if (
                         value == "\n"
-                    ):  # anthropic doesn't allow whitespace characters as stop-sequences
+                    ) and litellm.drop_params == True:  # anthropic doesn't allow whitespace characters as stop-sequences
                         continue
                     value = [value]
                 elif isinstance(value, list):
@@ -115,10 +115,13 @@ class AnthropicConfig:
                     for v in value:
                         if (
                             v == "\n"
-                        ):  # anthropic doesn't allow whitespace characters as stop-sequences
+                        ) and litellm.drop_params == True:  # anthropic doesn't allow whitespace characters as stop-sequences
                             continue
                         new_v.append(v)
-                    value = new_v
+                    if len(new_v) > 0:
+                        value = new_v
+                    else:
+                        continue
                 optional_params["stop_sequences"] = value
             if param == "temperature":
                 optional_params["temperature"] = value
diff --git a/litellm/tests/test_optional_params.py b/litellm/tests/test_optional_params.py
index 4fa8df3b6..c6e0d7a5b 100644
--- a/litellm/tests/test_optional_params.py
+++ b/litellm/tests/test_optional_params.py
@@ -5,13 +5,27 @@ import pytest
 
 sys.path.insert(0, os.path.abspath("../.."))
 import litellm
-from litellm.utils import get_optional_params_embeddings
+from litellm.utils import get_optional_params_embeddings, get_optional_params
 
 ## get_optional_params_embeddings
 ### Models: OpenAI, Azure, Bedrock
 ### Scenarios: w/ optional params + litellm.drop_params = True
 
 
+@pytest.mark.parametrize(
+    "stop_sequence, expected_count", [("\n", 0), (["\n"], 0), (["finish_reason"], 1)]
+)
+def test_anthropic_optional_params(stop_sequence, expected_count):
+    """
+    Test if whitespace character optional param is dropped by anthropic
+    """
+    litellm.drop_params = True
+    optional_params = get_optional_params(
+        model="claude-3", custom_llm_provider="anthropic", stop=stop_sequence
+    )
+    assert len(optional_params) == expected_count
+
+
 def test_bedrock_optional_params_embeddings():
     litellm.drop_params = True
     optional_params = get_optional_params_embeddings(
diff --git a/litellm/utils.py b/litellm/utils.py
index 63684766b..dceb280c9 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -5006,26 +5006,9 @@ def get_optional_params(
             model=model, custom_llm_provider=custom_llm_provider
         )
         _check_valid_arg(supported_params=supported_params)
-        # handle anthropic params
-        if stream:
-            optional_params["stream"] = stream
-        if stop is not None:
-            if type(stop) == str:
-                stop = [stop]  # openai can accept str/list for stop
-            optional_params["stop_sequences"] = stop
-        if temperature is not None:
-            optional_params["temperature"] = temperature
-        if top_p is not None:
-            optional_params["top_p"] = top_p
-        if max_tokens is not None:
-            if (model == "claude-2") or (model == "claude-instant-1"):
-                # these models use antropic_text.py which only accepts max_tokens_to_sample
-                optional_params["max_tokens_to_sample"] = max_tokens
-            else:
-                optional_params["max_tokens"] = max_tokens
-            optional_params["max_tokens"] = max_tokens
-        if tools is not None:
-            optional_params["tools"] = tools
+        optional_params = litellm.AnthropicConfig().map_openai_params(
+            non_default_params=non_default_params, optional_params=optional_params
+        )
     elif custom_llm_provider == "cohere":
         ## check if unsupported param passed in
         supported_params = get_supported_openai_params(
@@ -5929,15 +5912,7 @@ def get_supported_openai_params(model: str, custom_llm_provider: str):
    elif custom_llm_provider == "ollama_chat":
        return litellm.OllamaChatConfig().get_supported_openai_params()
    elif custom_llm_provider == "anthropic":
-        return [
-            "stream",
-            "stop",
-            "temperature",
-            "top_p",
-            "max_tokens",
-            "tools",
-            "tool_choice",
-        ]
+        return litellm.AnthropicConfig().get_supported_openai_params()
    elif custom_llm_provider == "groq":
        return [
            "temperature",
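
A minimal usage sketch (illustrative, not part of the patch) of the behavior the
new parametrized test pins down; it assumes only the call shape the test itself
uses (get_optional_params with model, custom_llm_provider, and stop):

    import litellm
    from litellm.utils import get_optional_params

    # drop_params must be True for whitespace stop-sequences to be dropped
    # (see the litellm.drop_params check added to map_openai_params above)
    litellm.drop_params = True

    # "\n" is not a valid Anthropic stop-sequence, so it is removed entirely
    # and no optional params are produced ("\n" -> expected_count 0)
    params = get_optional_params(
        model="claude-3", custom_llm_provider="anthropic", stop="\n"
    )
    assert len(params) == 0

    # a real stop word passes through as Anthropic's "stop_sequences" param
    # (["finish_reason"] -> expected_count 1)
    params = get_optional_params(
        model="claude-3", custom_llm_provider="anthropic", stop=["finish_reason"]
    )
    assert len(params) == 1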