Merge pull request #4566 from BerriAI/litellm_anthropic_Streaming

fix - raise and report Anthropic streaming errors
This commit is contained in:
Ishaan Jaff 2024-07-05 19:56:05 -07:00 committed by GitHub
commit 546ca2ce6f

View file

@ -49,7 +49,7 @@ class AnthropicConstants(Enum):
class AnthropicError(Exception):
    def __init__(self, status_code, message):
        self.status_code = status_code
        self.message: str = message
        self.request = httpx.Request(
            method="POST", url="https://api.anthropic.com/v1/messages"
        )
@ -830,6 +830,16 @@ class ModelResponseIterator:
.get("usage", {})
.get("output_tokens", 0),
)
elif type_chunk == "error":
"""
{"type":"error","error":{"details":null,"type":"api_error","message":"Internal server error"} }
"""
_error_dict = chunk.get("error", {}) or {}
message = _error_dict.get("message", None) or str(chunk)
raise AnthropicError(
message=message,
status_code=500, # it looks like Anthropic API does not return a status code in the chunk error - default to 500
)
returned_chunk = GenericStreamingChunk(
    text=text,
    tool_use=tool_use,