Mirror of https://github.com/BerriAI/litellm.git — synced 2025-04-25 10:44:24 +00:00
fix(proxy_server.py): fixes for making rejected responses work with streaming
This commit is contained in:
parent
f11f207ae6
commit
b41f30ca60
4 changed files with 34 additions and 22 deletions
|
@@ -193,13 +193,15 @@ class _OPTIONAL_PromptInjectionDetection(CustomLogger):
            return data

        except HTTPException as e:
            if (
                e.status_code == 400
                and isinstance(e.detail, dict)
                and "error" in e.detail
                and self.prompt_injection_params is not None
                and self.prompt_injection_params.reject_as_response
            ):
                if self.prompt_injection_params.reject_as_response:
                    return e.detail["error"]
                return e.detail["error"]
            raise e
        except Exception as e:
            traceback.print_exc()
|
|
Loading…
Add table
Add a link
Reference in a new issue