mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 03:34:10 +00:00
* add max_completion_tokens * add max_completion_tokens * add max_completion_tokens support for OpenAI models * add max_completion_tokens param * add max_completion_tokens for bedrock converse models * add test for converse maxTokens * fix openai o1 param mapping test * move test optional params * add max_completion_tokens for anthropic api * fix conftest * add max_completion tokens for vertex ai partner models * add max_completion_tokens for fireworks ai * add max_completion_tokens for hf rest api * add test for param mapping * add param mapping for vertex, gemini + testing * predibase is the most unstable and unusable llm api in prod, can't handle our ci/cd * add max_completion_tokens to openai supported params * fix fireworks ai param mapping
32 lines
958 B
Python
32 lines
958 B
Python
import os
import sys

import pytest

# Make the repository root importable so `litellm` resolves when this test
# file is run from its own directory.
sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path

from litellm.llms.fireworks_ai import FireworksAIConfig

# Shared provider-config instance exercised by the tests below.
fireworks = FireworksAIConfig()
|
def test_map_openai_params_tool_choice():
    """Verify FireworksAIConfig.map_openai_params translates OpenAI
    `tool_choice` values into their Fireworks AI equivalents.

    Covers: "required" -> "any", "auto" passthrough, unsupported params
    being dropped, and an explicit None being preserved.
    """
    # Test case 1: tool_choice is "required" — Fireworks spells this "any".
    result = fireworks.map_openai_params({"tool_choice": "required"}, {}, "some_model")
    assert result == {"tool_choice": "any"}

    # Test case 2: tool_choice is "auto" — passed through unchanged.
    result = fireworks.map_openai_params({"tool_choice": "auto"}, {}, "some_model")
    assert result == {"tool_choice": "auto"}

    # Test case 3: tool_choice is not present — unsupported params are dropped,
    # yielding an empty mapping.
    result = fireworks.map_openai_params(
        {"some_other_param": "value"}, {}, "some_model"
    )
    assert result == {}

    # Test case 4: tool_choice is None — the explicit None is preserved as-is.
    result = fireworks.map_openai_params({"tool_choice": None}, {}, "some_model")
    assert result == {"tool_choice": None}