diff --git a/litellm/llms/openai_like/chat/handler.py b/litellm/llms/openai_like/chat/handler.py
index f34869bdac..b3b1488409 100644
--- a/litellm/llms/openai_like/chat/handler.py
+++ b/litellm/llms/openai_like/chat/handler.py
@@ -207,7 +207,6 @@ class OpenAILikeChatHandler(OpenAILikeBase):
             )
             response.raise_for_status()
         except httpx.HTTPStatusError as e:
-            print(f"e.response.text: {e.response.text}")
             raise OpenAILikeError(
                 status_code=e.response.status_code,
                 message=e.response.text,
@@ -215,7 +214,6 @@
         except httpx.TimeoutException:
             raise OpenAILikeError(status_code=408, message="Timeout error occurred.")
         except Exception as e:
-            print(f"e: {e}")
             raise OpenAILikeError(status_code=500, message=str(e))

         return OpenAILikeChatConfig._transform_response(
diff --git a/litellm/llms/openai_like/chat/transformation.py b/litellm/llms/openai_like/chat/transformation.py
index 2ea2010743..c8511cb630 100644
--- a/litellm/llms/openai_like/chat/transformation.py
+++ b/litellm/llms/openai_like/chat/transformation.py
@@ -75,7 +75,6 @@
         custom_llm_provider: str,
         base_model: Optional[str],
     ) -> ModelResponse:
-        print(f"response: {response}")
         response_json = response.json()
         logging_obj.post_call(
             input=messages,
diff --git a/ruff.toml b/ruff.toml
index 09fccd657a..ba8f51d3c6 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,4 +1,4 @@
 ignore = ["F405", "E402", "F401", "E501", "F403"]
-extend-select = ["E501", "PLR0915"]
+extend-select = ["E501", "PLR0915", "T20"]
 line-length = 120
 exclude = ["litellm/types/*", "litellm/__init__.py", "litellm/proxy/example_config_yaml/*"]
\ No newline at end of file