Merge pull request #2292 from BerriAI/litellm_mistral_streaming_error

fix(utils.py): handle mistral streaming error
commit ea1e0f5ad9
Author: Krish Dholakia (committed by GitHub)
Date: 2024-03-02 07:48:14 -08:00

@@ -6576,6 +6576,17 @@ def exception_type(
                         model=model,
                         response=original_exception.response,
                     )
+                elif "Mistral API raised a streaming error" in error_str:
+                    exception_mapping_worked = True
+                    _request = httpx.Request(
+                        method="POST", url="https://api.openai.com/v1"
+                    )
+                    raise APIError(
+                        message=f"{exception_provider} - {message}",
+                        llm_provider=custom_llm_provider,
+                        model=model,
+                        response=httpx.Response(status_code=500, request=_request),
+                    )
                 elif hasattr(original_exception, "status_code"):
                     exception_mapping_worked = True
                     if original_exception.status_code == 401:
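
For reference, a minimal standalone sketch of the mapping this hunk adds, assuming a simplified APIError stand-in (the real class lives in litellm.exceptions and takes more arguments). A mid-stream failure has no real HTTP response to attach, so the branch fabricates a 500 response on a placeholder request:

import httpx

class APIError(Exception):
    # Simplified stand-in for litellm's APIError, for illustration only.
    def __init__(self, message, llm_provider, model, response):
        super().__init__(message)
        self.llm_provider = llm_provider
        self.model = model
        self.response = response

def map_mistral_streaming_error(error_str, message, model,
                                custom_llm_provider, exception_provider):
    # Hypothetical helper mirroring the new elif branch in exception_type().
    if "Mistral API raised a streaming error" in error_str:
        # No real HTTP response exists for a mid-stream failure, so a 500
        # response is fabricated on a placeholder request.
        _request = httpx.Request(method="POST", url="https://api.openai.com/v1")
        raise APIError(
            message=f"{exception_provider} - {message}",
            llm_provider=custom_llm_provider,
            model=model,
            response=httpx.Response(status_code=500, request=_request),
        )

Calling map_mistral_streaming_error with the sentinel string raises APIError; any other error string falls through to the remaining mapping branches.
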
@@ -8792,6 +8803,10 @@ class CustomStreamWrapper:
                 completion_obj["content"] = response_obj["text"]
                 print_verbose(f"completion obj content: {completion_obj['content']}")
                 if response_obj["is_finished"]:
+                    if response_obj["finish_reason"] == "error":
+                        raise Exception(
+                            "Mistral API raised a streaming error - finish_reason: error, no content string given."
+                        )
                     model_response.choices[0].finish_reason = response_obj[
                         "finish_reason"
                     ]
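
The second hunk is what produces that sentinel string: when a Mistral stream ends with finish_reason "error" and no content, the wrapper raises instead of yielding a silent empty chunk. A sketch under the assumption that response_obj is a plain dict with the keys shown (the helper name is hypothetical; in litellm this logic runs inline in CustomStreamWrapper):

def finish_reason_from_chunk(response_obj):
    # Hypothetical helper; litellm runs this check inline per chunk.
    if response_obj["is_finished"]:
        if response_obj["finish_reason"] == "error":
            # Raising here lets exception_type() (first hunk) convert the
            # failure into an APIError the caller can handle uniformly.
            raise Exception(
                "Mistral API raised a streaming error - finish_reason: error, no content string given."
            )
        return response_obj["finish_reason"]
    return None

# {"text": "", "is_finished": True, "finish_reason": "error"} raises, while
# {"text": "hi", "is_finished": True, "finish_reason": "stop"} returns "stop".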