Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
fix(proxy_server.py): raise streaming exceptions
commit 343a06fd84 (parent 0e08a0082b)
4 changed files with 25 additions and 17 deletions
```diff
@@ -213,10 +213,7 @@ async def _async_streaming(response, model, custom_llm_provider, args):
             print_verbose(f"line in async streaming: {line}")
             yield line
     except Exception as e:
         print_verbose(f"error raised _async_streaming: {traceback.format_exc()}")
-        raise exception_type(
-            model=model, custom_llm_provider=custom_llm_provider, original_exception=e, completion_kwargs=args,
-        )
         raise e
 
 def mock_completion(model: str, messages: List, stream: Optional[bool] = False, mock_response: str = "This is a mock request", **kwargs):
     """
```
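For context, here is a minimal, self-contained sketch of how an async streaming wrapper in this style surfaces mid-stream errors to its caller. The names `fake_provider_stream`, `streaming_wrapper`, and `main` are illustrative stand-ins, not litellm's actual API; the wrapper only mirrors the shape of the patched `_async_streaming`.

```python
import asyncio
import traceback

async def fake_provider_stream():
    # Hypothetical stand-in for a provider's async response iterator.
    yield "chunk-1"
    yield "chunk-2"
    raise RuntimeError("provider dropped the connection mid-stream")

async def streaming_wrapper(response):
    # Same shape as the patched function: forward each chunk, log the
    # traceback on failure, then re-raise so the caller sees the error.
    try:
        async for line in response:
            yield line
    except Exception as e:
        print(f"error raised in streaming wrapper: {traceback.format_exc()}")
        raise e

async def main():
    try:
        async for chunk in streaming_wrapper(fake_provider_stream()):
            print("got:", chunk)
    except RuntimeError as e:
        # Because the wrapper re-raises instead of swallowing the error,
        # the proxy layer can return a real error response rather than
        # silently ending the stream.
        print("caller saw:", e)

asyncio.run(main())
```

The key design point is that the `except` block re-raises rather than merely logging: a streaming response that fails partway through must propagate the exception, or the consumer sees a stream that simply ends and cannot distinguish truncation from completion.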