forked from phoenix/litellm-mirror
fix(http_handler.py): raise more detailed http status errors
parent 0001683036
commit c151a1d244

3 changed files with 35 additions and 13 deletions
@@ -1,23 +1,28 @@
```
import os, types
import copy
import json
from enum import Enum
import requests, copy  # type: ignore
import os
import time
import types
from enum import Enum
from functools import partial
from typing import Callable, Optional, List, Union
import litellm.litellm_core_utils
from litellm.utils import ModelResponse, Usage, CustomStreamWrapper
from litellm.litellm_core_utils.core_helpers import map_finish_reason
from typing import Callable, List, Optional, Union

import httpx  # type: ignore
import requests  # type: ignore

import litellm
from .prompt_templates.factory import prompt_factory, custom_prompt
import litellm.litellm_core_utils
from litellm.litellm_core_utils.core_helpers import map_finish_reason
from litellm.llms.custom_httpx.http_handler import (
    AsyncHTTPHandler,
    _get_async_httpx_client,
    _get_httpx_client,
)
from .base import BaseLLM
import httpx  # type: ignore
from litellm.types.llms.anthropic import AnthropicMessagesToolChoice
from litellm.utils import CustomStreamWrapper, ModelResponse, Usage

from .base import BaseLLM
from .prompt_templates.factory import custom_prompt, prompt_factory


class AnthropicConstants(Enum):
```
@@ -179,10 +184,19 @@ async def make_call(
```
    if client is None:
        client = _get_async_httpx_client()  # Create a new client if none provided

    try:
        response = await client.post(api_base, headers=headers, data=data, stream=True)
    except httpx.HTTPStatusError as e:
        raise AnthropicError(
            status_code=e.response.status_code, message=await e.response.aread()
        )
    except Exception as e:
        raise AnthropicError(status_code=500, message=str(e))

    if response.status_code != 200:
        raise AnthropicError(status_code=response.status_code, message=response.text)
        raise AnthropicError(
            status_code=response.status_code, message=await response.aread()
        )

    completion_stream = response.aiter_lines()
```
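The hunk above moves the provider's error body into the raised `AnthropicError`: on a streaming POST the body has not been consumed yet, so it has to be read with `await response.aread()` (or `await e.response.aread()`) rather than `response.text`. Below is a minimal sketch of the same pattern written against plain httpx; the URL, headers, and payload are placeholders and `stream_with_detailed_errors` is a hypothetical helper, not litellm code.

```python
import asyncio
import httpx


async def stream_with_detailed_errors(url: str, headers: dict, payload: dict) -> None:
    # Hypothetical helper, not litellm code: POST a streaming request and,
    # on an HTTP error, surface the status code plus the response body.
    client = httpx.AsyncClient()
    try:
        async with client.stream("POST", url, headers=headers, json=payload) as response:
            try:
                response.raise_for_status()  # raises httpx.HTTPStatusError on 4xx/5xx
            except httpx.HTTPStatusError as e:
                # The streamed body has not been consumed yet, so it must be
                # read asynchronously before it can go into the error message.
                body = await e.response.aread()
                raise RuntimeError(
                    f"status={e.response.status_code} body={body.decode(errors='replace')}"
                ) from e
            async for line in response.aiter_lines():
                print(line)
    finally:
        await client.aclose()


# usage sketch (endpoint and payload are placeholders):
#   asyncio.run(stream_with_detailed_errors("https://example.com/v1/messages", {}, {"prompt": "hi"}))
```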
@@ -114,6 +114,11 @@ class AsyncHTTPHandler:
```
            finally:
                await new_client.aclose()
        except httpx.HTTPStatusError as e:
            setattr(e, "status_code", e.response.status_code)
            if stream is True:
                setattr(e, "message", await e.response.aread())
            else:
                setattr(e, "message", e.response.text)
            raise e
        except Exception as e:
            raise e
```
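With the handler change above, a failed request still propagates as an `httpx.HTTPStatusError`, but it now carries `status_code` and `message` attributes attached via `setattr` before the re-raise. A hedged caller-side sketch follows; `describe_http_error` and the commented usage are hypothetical, not part of litellm.

```python
import httpx


def describe_http_error(e: Exception) -> str:
    # Hypothetical helper: read the attributes the handler attaches with
    # setattr() before re-raising, falling back when they are absent.
    status = getattr(e, "status_code", 500)
    message = getattr(e, "message", str(e))
    if isinstance(message, bytes):
        message = message.decode(errors="replace")
    return f"provider returned {status}: {message}"


# usage sketch (assuming `handler` is an AsyncHTTPHandler instance):
#   try:
#       response = await handler.post(api_base, headers=headers, data=data, stream=True)
#   except httpx.HTTPStatusError as e:
#       print(describe_http_error(e))
```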
@@ -5728,6 +5728,9 @@ def exception_type(
```
    print()  # noqa
    try:
        if model:
            if hasattr(original_exception, "message"):
                error_str = str(original_exception.message)
            else:
                error_str = str(original_exception)
            if isinstance(original_exception, BaseException):
                exception_type = type(original_exception).__name__
```
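The `exception_type` hunk prefers an attached `message` attribute over `str(exception)` when building `error_str`, and records the exception's class name for later mapping. A small self-contained sketch of that extraction pattern, with a purely illustrative sample exception:

```python
def summarize_exception(original_exception: Exception) -> tuple[str, str]:
    # Prefer a `message` attribute (as set by the HTTP handler above),
    # otherwise fall back to str(); also capture the exception class name.
    if hasattr(original_exception, "message"):
        error_str = str(original_exception.message)
    else:
        error_str = str(original_exception)
    exception_type = type(original_exception).__name__
    return error_str, exception_type


if __name__ == "__main__":
    err = ValueError("invalid request")
    setattr(err, "message", "provider returned 400: invalid request")
    print(summarize_exception(err))  # ('provider returned 400: invalid request', 'ValueError')
```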