fix(proxy_server.py): drop none values in streaming response
parent 05e096ce25
commit 57998c28dc

2 changed files with 15 additions and 11 deletions
@@ -2115,10 +2115,9 @@ async def async_data_generator(response, user_api_key_dict):
     try:
         start_time = time.time()
         async for chunk in response:
             verbose_proxy_logger.debug(f"returned chunk: {chunk}")
             assert isinstance(chunk, litellm.ModelResponse)
+            chunk = chunk.model_dump_json(exclude_none=True)
             try:
-                yield f"data: {json.dumps(chunk.model_dump(exclude_none=True))}\n\n"
+                yield f"data: {chunk}\n\n"
             except Exception as e:
                 yield f"data: {str(e)}\n\n"
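
Serializing each chunk once with Pydantic v2's model_dump_json(exclude_none=True) keeps unset fields out of every SSE event, instead of re-encoding through json.dumps. A minimal sketch of the behavior, using a stand-in model rather than the real litellm.ModelResponse:

    from typing import Optional
    from pydantic import BaseModel

    class ChunkStub(BaseModel):
        # Stand-in for a streaming chunk; not the real litellm class.
        content: Optional[str] = None
        role: Optional[str] = None

    chunk = ChunkStub(content="Hello")

    # Default dump keeps null fields, bloating every "data: ..." event.
    print(chunk.model_dump_json())                   # {"content":"Hello","role":null}

    # exclude_none drops them, which is what the hunk above switches to.
    print(chunk.model_dump_json(exclude_none=True))  # {"content":"Hello"}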
@@ -205,18 +205,18 @@ def map_finish_reason(
 class FunctionCall(OpenAIObject):
     arguments: str
-    name: str
+    name: Optional[str] = None


 class Function(OpenAIObject):
     arguments: str
-    name: str
+    name: Optional[str] = None


 class ChatCompletionDeltaToolCall(OpenAIObject):
-    id: str
+    id: Optional[str] = None
     function: Function
-    type: str
+    type: Optional[str] = None
     index: int
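
The loosened annotations reflect how tool calls stream: the first delta usually carries id, type, and the function name, while follow-up deltas carry only argument fragments, so strictly required fields would fail validation mid-stream. A rough illustration with stand-in models (field names mirror the diff; the values are made up):

    from typing import Optional
    from pydantic import BaseModel

    class FunctionStub(BaseModel):
        arguments: str
        name: Optional[str] = None   # absent on continuation chunks

    class DeltaToolCallStub(BaseModel):
        id: Optional[str] = None     # only the first chunk carries an id
        function: FunctionStub
        type: Optional[str] = None
        index: int

    # First chunk: full header for the tool call.
    first = DeltaToolCallStub(
        id="call_abc123", type="function", index=0,
        function=FunctionStub(name="get_weather", arguments=""),
    )

    # Later chunks: arguments only. With the old required fields,
    # this construction would raise a ValidationError.
    later = DeltaToolCallStub(index=0, function=FunctionStub(arguments='{"city": "Par'))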
@@ -275,8 +275,11 @@ class Delta(OpenAIObject):
         super(Delta, self).__init__(**params)
         self.content = content
         self.role = role
         if function_call is not None and isinstance(function_call, dict):
             self.function_call = FunctionCall(**function_call)
         else:
             self.function_call = function_call
-        if tool_calls is not None and isinstance(tool_calls, dict):
+        if tool_calls is not None and isinstance(tool_calls, list):
+            self.tool_calls = []
+            for tool_call in tool_calls:
+                if tool_call.get("index", None) is None:
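
In the OpenAI streaming format, tool_calls is a list, since a model can emit several calls in parallel, and each fragment's index says which accumulating call it belongs to, hence the change from a dict check to a list check. A hedged sketch of stitching fragments together by index (hypothetical helper, not litellm code):

    from typing import Dict, List

    def merge_tool_call_chunks(chunks: List[List[dict]]) -> Dict[int, dict]:
        # Hypothetical helper: rebuild complete tool calls from streamed fragments.
        calls: Dict[int, dict] = {}
        for tool_calls in chunks:               # one fragment list per stream chunk
            for fragment in tool_calls:
                idx = fragment.get("index", 0)  # default when a fragment omits index
                call = calls.setdefault(idx, {"function": {"arguments": ""}})
                if fragment.get("id"):
                    call["id"] = fragment["id"]
                fn = fragment.get("function") or {}
                if fn.get("name"):
                    call["function"]["name"] = fn["name"]
                call["function"]["arguments"] += fn.get("arguments", "")
        return calls

    chunks = [
        [{"index": 0, "id": "call_1", "function": {"name": "add", "arguments": ""}}],
        [{"index": 0, "function": {"arguments": '{"a": 1, '}}],
        [{"index": 0, "function": {"arguments": '"b": 2}'}}],
    ]
    print(merge_tool_call_chunks(chunks)[0]["function"]["arguments"])  # {"a": 1, "b": 2}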
@@ -8727,7 +8730,7 @@ class CustomStreamWrapper:
                     or original_chunk.choices[0].delta.tool_calls is not None
                 ):
                     try:
-                        delta = dict(original_chunk.choices[0].delta)
+                        delta = original_chunk.choices[0].delta
                         model_response.system_fingerprint = (
                             original_chunk.system_fingerprint
                         )
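
Keeping the Delta object instead of calling dict() on it matters in Pydantic v2: dict(model) is a shallow conversion that leaves nested models (like the tool calls inside the delta) as objects, while model_dump() recurses all the way down. A small illustration with stand-in classes:

    from pydantic import BaseModel

    class Inner(BaseModel):
        value: int

    class Outer(BaseModel):
        inner: Inner

    outer = Outer(inner=Inner(value=1))

    shallow = dict(outer)          # {'inner': Inner(value=1)} - nested model survives
    deep = outer.model_dump()      # {'inner': {'value': 1}}   - fully plain dicts
    print(type(shallow["inner"]))  # <class '__main__.Inner'>
    print(type(deep["inner"]))     # <class 'dict'>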
@@ -8762,7 +8765,9 @@ class CustomStreamWrapper:
                                 is None
                             ):
                                 t.function.arguments = ""
-                        model_response.choices[0].delta = Delta(**delta)
+                        _json_delta = delta.model_dump()
+                        print_verbose(f"_json_delta: {_json_delta}")
+                        model_response.choices[0].delta = Delta(**_json_delta)
                     except Exception as e:
                         traceback.print_exc()
                         model_response.choices[0].delta = Delta()
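
Dumping first and rebuilding means Delta(**_json_delta) receives plain dicts, which the list-handling branch added earlier can re-wrap into typed tool-call objects. A minimal sketch of the dump-then-rebuild round trip, assuming stand-in models:

    from typing import List
    from pydantic import BaseModel

    class ToolCallStub(BaseModel):
        index: int
        arguments: str = ""

    class DeltaStub(BaseModel):
        tool_calls: List[ToolCallStub] = []

    delta = DeltaStub(tool_calls=[ToolCallStub(index=0, arguments="{}")])

    # model_dump() recurses, so the rebuilt delta is constructed from plain dicts...
    _json_delta = delta.model_dump()  # {'tool_calls': [{'index': 0, 'arguments': '{}'}]}

    # ...and Pydantic re-validates them into typed objects on construction.
    rebuilt = DeltaStub(**_json_delta)
    assert isinstance(rebuilt.tool_calls[0], ToolCallStub)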