diff --git a/tests/llm_translation/test_max_completion_tokens.py b/tests/llm_translation/test_max_completion_tokens.py
index a8f3dd50a8..f1374a22a2 100644
--- a/tests/llm_translation/test_max_completion_tokens.py
+++ b/tests/llm_translation/test_max_completion_tokens.py
@@ -330,7 +330,7 @@ def test_all_model_configs():
         drop_params=False,
     ) == {"max_tokens_to_sample": 10}
 
-    from litellm.llms.databricks.chat.handler import DatabricksConfig
+    from litellm.llms.databricks.chat.transformation import DatabricksConfig
 
     assert "max_completion_tokens" in DatabricksConfig().get_supported_openai_params()
 