Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
Merge pull request #4566 from BerriAI/litellm_anthropic_Streaming
fix: raise and report Anthropic streaming errors
Commit 546ca2ce6f: 1 changed file with 11 additions and 1 deletion.
@@ -49,7 +49,7 @@ class AnthropicConstants(Enum):
 class AnthropicError(Exception):
     def __init__(self, status_code, message):
         self.status_code = status_code
-        self.message = message
+        self.message: str = message
         self.request = httpx.Request(
             method="POST", url="https://api.anthropic.com/v1/messages"
         )
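The functional change in this hunk is only the explicit str annotation on self.message. For context, here is a minimal standalone sketch of the exception class after the change; it is a sketch, not the full class as it appears in litellm, and assumes only that httpx is installed:

import httpx


class AnthropicError(Exception):
    # Wraps an Anthropic API failure with its HTTP status code and message.
    def __init__(self, status_code, message):
        self.status_code = status_code
        self.message: str = message  # annotation added by this commit
        # A synthetic request object so callers can see which endpoint failed.
        self.request = httpx.Request(
            method="POST", url="https://api.anthropic.com/v1/messages"
        )
        super().__init__(self.message)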
@@ -830,6 +830,16 @@ class ModelResponseIterator:
                     .get("usage", {})
                     .get("output_tokens", 0),
                 )
+            elif type_chunk == "error":
+                """
+                {"type":"error","error":{"details":null,"type":"api_error","message":"Internal server error"} }
+                """
+                _error_dict = chunk.get("error", {}) or {}
+                message = _error_dict.get("message", None) or str(chunk)
+                raise AnthropicError(
+                    message=message,
+                    status_code=500,  # it looks like Anthropic API does not return a status code in the chunk error - default to 500
+                )
             returned_chunk = GenericStreamingChunk(
                 text=text,
                 tool_use=tool_use,
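The new elif branch is what surfaces server-side failures: when the stream yields a chunk of type "error", the iterator now raises instead of dropping the chunk. Below is a hedged, runnable sketch of that branch in isolation, reusing the AnthropicError class sketched above; handle_error_chunk is a hypothetical helper name for illustration, not a function in litellm:

def handle_error_chunk(chunk: dict) -> None:
    # Example payload from the Anthropic streaming API, as quoted in the diff:
    # {"type":"error","error":{"details":null,"type":"api_error","message":"Internal server error"}}
    if chunk.get("type") == "error":
        _error_dict = chunk.get("error", {}) or {}
        message = _error_dict.get("message", None) or str(chunk)
        raise AnthropicError(
            message=message,
            status_code=500,  # the error chunk carries no status code, so default to 500
        )


try:
    handle_error_chunk(
        {
            "type": "error",
            "error": {"details": None, "type": "api_error", "message": "Internal server error"},
        }
    )
except AnthropicError as e:
    print(e.status_code, e.message)  # prints: 500 Internal server error

Callers iterating a litellm Anthropic stream should therefore be prepared to handle this exception (or whatever litellm's exception-mapping layer converts it to) rather than assuming every chunk carries content.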