From 120791052288eed2fe8cda59c9b882f48a3f2dd2 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Mon, 13 Nov 2023 11:06:57 -0800
Subject: [PATCH] (fix) proxy cli maintain back comp with openai < 1.00

---
 litellm/proxy/proxy_server.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 0079e3f9c..7ea0da58d 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -410,7 +410,10 @@ def data_generator(response):
     print_verbose("inside generator")
     for chunk in response:
         print_verbose(f"returned chunk: {chunk}")
-        yield f"data: {json.dumps(chunk.dict())}\n\n"
+        try:
+            yield f"data: {json.dumps(chunk.dict())}\n\n"
+        except:
+            yield f"data: {json.dumps(chunk)}\n\n"
 
 
 def litellm_completion(*args, **kwargs):
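
Note (not part of the patch): a minimal, self-contained sketch of why the try/except fallback keeps the SSE generator working across SDK versions. With openai >= 1.0.0 a streaming chunk is a pydantic model object exposing .dict(), while openai < 1.0.0 yields plain dicts that json.dumps can serialize directly. FakeModelChunk below is a hypothetical stand-in for the new-style chunk object, and print_verbose is omitted for brevity.

    import json

    class FakeModelChunk:
        # Hypothetical stand-in for an openai>=1.0.0 streaming chunk,
        # which is a model object exposing .dict().
        def dict(self):
            return {"choices": [{"delta": {"content": "hi"}}]}

    def data_generator(response):
        for chunk in response:
            try:
                # openai >= 1.0.0: chunk is a model object, serialize via .dict()
                yield f"data: {json.dumps(chunk.dict())}\n\n"
            except:
                # openai < 1.0.0: chunk is already a plain dict
                yield f"data: {json.dumps(chunk)}\n\n"

    # Both chunk shapes produce valid SSE "data:" lines:
    old_style_chunk = {"choices": [{"delta": {"content": "hi"}}]}
    for line in data_generator([FakeModelChunk(), old_style_chunk]):
        print(line, end="")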