fix test_all_model_configs

Ishaan Jaff 2024-09-16 17:44:48 -07:00
parent 7b09591ca6
commit 4dcb092d12

@@ -235,7 +235,7 @@ def test_all_model_configs():
     drop_params=False,
 ) == {"max_tokens": 10}
-from litellm.llms.bedrock.chat import AmazonConverseConfig
+from litellm.llms.bedrock.chat.converse_transformation import AmazonConverseConfig
 assert (
     "max_completion_tokens"