Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
refactor: replace .error() with .exception() logging for better debugging on sentry
This commit is contained in:
parent
1510daba4f
commit
61f4b71ef7
35 changed files with 242 additions and 253 deletions
|
@@ -15,6 +15,7 @@ import httpx  # type: ignore
|
|||
import requests # type: ignore
|
||||
|
||||
import litellm
|
||||
from litellm import verbose_logger
|
||||
from litellm.litellm_core_utils.core_helpers import map_finish_reason
|
||||
from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler
|
||||
from litellm.types.llms.databricks import GenericStreamingChunk
|
||||
|
@@ -489,8 +490,13 @@ class CodestralTextCompletion(BaseLLM):
|
|||
message="HTTPStatusError - {}".format(e.response.text),
|
||||
)
|
||||
except Exception as e:
|
||||
verbose_logger.exception(
|
||||
"litellm.llms.text_completion_codestral.py::async_completion() - Exception occurred - {}".format(
|
||||
str(e)
|
||||
)
|
||||
)
|
||||
raise TextCompletionCodestralError(
|
||||
status_code=500, message="{}\n{}".format(str(e), traceback.format_exc())
|
||||
status_code=500, message="{}".format(str(e))
|
||||
)
|
||||
return self.process_text_completion_response(
|
||||
model=model,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue