From c151a1d2449eb645631c0e36ea037bf1a21f4ec2 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Fri, 28 Jun 2024 15:12:38 -0700
Subject: [PATCH] fix(http_handler.py): raise more detailed http status errors

---
 litellm/llms/anthropic.py                 | 38 ++++++++++++++++-------
 litellm/llms/custom_httpx/http_handler.py |  5 +++
 litellm/utils.py                          |  5 ++-
 3 files changed, 35 insertions(+), 13 deletions(-)

diff --git a/litellm/llms/anthropic.py b/litellm/llms/anthropic.py
index 808813c05..1051a56b7 100644
--- a/litellm/llms/anthropic.py
+++ b/litellm/llms/anthropic.py
@@ -1,23 +1,28 @@
-import os, types
+import copy
 import json
-from enum import Enum
-import requests, copy  # type: ignore
+import os
 import time
+import types
+from enum import Enum
 from functools import partial
-from typing import Callable, Optional, List, Union
-import litellm.litellm_core_utils
-from litellm.utils import ModelResponse, Usage, CustomStreamWrapper
-from litellm.litellm_core_utils.core_helpers import map_finish_reason
+from typing import Callable, List, Optional, Union
+
+import httpx  # type: ignore
+import requests  # type: ignore
+
 import litellm
-from .prompt_templates.factory import prompt_factory, custom_prompt
+import litellm.litellm_core_utils
+from litellm.litellm_core_utils.core_helpers import map_finish_reason
 from litellm.llms.custom_httpx.http_handler import (
     AsyncHTTPHandler,
     _get_async_httpx_client,
     _get_httpx_client,
 )
-from .base import BaseLLM
-import httpx  # type: ignore
 from litellm.types.llms.anthropic import AnthropicMessagesToolChoice
+from litellm.utils import CustomStreamWrapper, ModelResponse, Usage
+
+from .base import BaseLLM
+from .prompt_templates.factory import custom_prompt, prompt_factory
 
 
 class AnthropicConstants(Enum):
@@ -179,10 +184,19 @@ async def make_call(
     if client is None:
         client = _get_async_httpx_client()  # Create a new client if none provided
 
-    response = await client.post(api_base, headers=headers, data=data, stream=True)
+    try:
+        response = await client.post(api_base, headers=headers, data=data, stream=True)
+    except httpx.HTTPStatusError as e:
+        raise AnthropicError(
+            status_code=e.response.status_code, message=await e.response.aread()
+        )
+    except Exception as e:
+        raise AnthropicError(status_code=500, message=str(e))
 
     if response.status_code != 200:
-        raise AnthropicError(status_code=response.status_code, message=response.text)
+        raise AnthropicError(
+            status_code=response.status_code, message=await response.aread()
+        )
 
     completion_stream = response.aiter_lines()
 
diff --git a/litellm/llms/custom_httpx/http_handler.py b/litellm/llms/custom_httpx/http_handler.py
index d24acaecc..dfb11f191 100644
--- a/litellm/llms/custom_httpx/http_handler.py
+++ b/litellm/llms/custom_httpx/http_handler.py
@@ -114,6 +114,11 @@ class AsyncHTTPHandler:
             finally:
                 await new_client.aclose()
         except httpx.HTTPStatusError as e:
+            setattr(e, "status_code", e.response.status_code)
+            if stream is True:
+                setattr(e, "message", await e.response.aread())
+            else:
+                setattr(e, "message", e.response.text)
             raise e
         except Exception as e:
             raise e
diff --git a/litellm/utils.py b/litellm/utils.py
index c53e8f338..0eedd259c 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -5728,7 +5728,10 @@ def exception_type(
         print()  # noqa
     try:
         if model:
-            error_str = str(original_exception)
+            if hasattr(original_exception, "message"):
+                error_str = str(original_exception.message)
+            else:
+                error_str = str(original_exception)
             if isinstance(original_exception, BaseException):
                 exception_type = type(original_exception).__name__
             else:
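
Reviewer note: below is a minimal sketch of what callers observe after this
patch, assuming the handler's raise_for_status path fires on a non-2xx reply.
The endpoint URL and payload are illustrative assumptions, not part of the
change. The handler now attaches status_code and message to the re-raised
httpx.HTTPStatusError, which is exactly what the new hasattr() branch in
exception_type() picks up:

    import asyncio

    import httpx

    from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler


    async def main():
        handler = AsyncHTTPHandler()
        try:
            # Hypothetical endpoint that answers with a 4xx/5xx status.
            await handler.post(
                "https://example.com/v1/messages",
                headers={"content-type": "application/json"},
                data='{"bad": "request"}',
            )
        except httpx.HTTPStatusError as e:
            # Attributes attached by the patched handler before re-raising;
            # with stream=True the body comes from e.response.aread() instead.
            print(getattr(e, "status_code", None))  # e.g. 400
            print(getattr(e, "message", None))      # full response body text


    asyncio.run(main())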