Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00.
(code quality) Add ruff check to ban print
in repo (#7233)
* fix ruff print check * fix ruff check
This commit is contained in:
parent
52cad4b3f2
commit
3fca0d756d
3 changed files with 1 addition and 4 deletions
|
@@ -207,7 +207,6 @@ class OpenAILikeChatHandler(OpenAILikeBase):
                 )

                 response.raise_for_status()
             except httpx.HTTPStatusError as e:
-                print(f"e.response.text: {e.response.text}")
                 raise OpenAILikeError(
                     status_code=e.response.status_code,
                     message=e.response.text,
|
@@ -215,7 +214,6 @@ class OpenAILikeChatHandler(OpenAILikeBase):
             except httpx.TimeoutException:
                 raise OpenAILikeError(status_code=408, message="Timeout error occurred.")
             except Exception as e:
-                print(f"e: {e}")
                 raise OpenAILikeError(status_code=500, message=str(e))

             return OpenAILikeChatConfig._transform_response(
|
|
@@ -75,7 +75,6 @@ class OpenAILikeChatConfig(OpenAIGPTConfig):
         custom_llm_provider: str,
         base_model: Optional[str],
     ) -> ModelResponse:
-        print(f"response: {response}")
         response_json = response.json()
         logging_obj.post_call(
             input=messages,
|
|
@@ -1,4 +1,4 @@
 ignore = ["F405", "E402", "F401", "E501", "F403"]
-extend-select = ["E501", "PLR0915"]
+extend-select = ["E501", "PLR0915", "T20"]
 line-length = 120
 exclude = ["litellm/types/*", "litellm/__init__.py", "litellm/proxy/example_config_yaml/*"]
Loading…
Add table
Add a link
Reference in a new issue