feat(proxy_server.py): handle pydantic MockValSer error
Fixes https://github.com/BerriAI/litellm/issues/4898#issuecomment-2252105485
parent 35737d04d3
commit 1d6c39a607
2 changed files with 68 additions and 2 deletions
litellm/proxy/proxy_server.py

@@ -2371,13 +2371,15 @@ async def async_data_generator(
     try:
         start_time = time.time()
         async for chunk in response:
+            verbose_proxy_logger.debug(
+                "async_data_generator: received streaming chunk - {}".format(chunk)
+            )
             ### CALL HOOKS ### - modify outgoing data
             chunk = await proxy_logging_obj.async_post_call_streaming_hook(
                 user_api_key_dict=user_api_key_dict, response=chunk
             )

-            chunk = chunk.model_dump_json(exclude_none=True)
+            chunk = chunk.model_dump_json(exclude_none=True, exclude_unset=True)
             try:
                 yield f"data: {chunk}\n\n"
             except Exception as e:
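The fix adds exclude_unset=True to the streaming-chunk serialization. As a point of reference, here is a minimal sketch of how the two pydantic v2 flags differ; the Chunk model below is purely illustrative, not a litellm type:

from typing import Optional

from pydantic import BaseModel


class Chunk(BaseModel):
    # hypothetical model for illustration only
    id: str
    object: str = "chat.completion.chunk"  # default, not set by the caller
    content: Optional[str] = None


chunk = Chunk(id="chunk-1", content="hello")

# exclude_none drops fields whose value is None:
print(chunk.model_dump_json(exclude_none=True))
# {"id":"chunk-1","object":"chat.completion.chunk","content":"hello"}

# exclude_unset additionally drops fields the caller never passed,
# even when they carry non-None defaults:
print(chunk.model_dump_json(exclude_none=True, exclude_unset=True))
# {"id":"chunk-1","content":"hello"}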
litellm/tests/test_pydantic.py (new file, 64 lines)

@@ -0,0 +1,64 @@
import os
import sys
import traceback

from dotenv import load_dotenv

load_dotenv()
import io
import os

sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path
import asyncio
import json
import os
import tempfile
from unittest.mock import MagicMock, patch

import pytest

import litellm
from litellm.types.utils import (
    ChatCompletionTokenLogprob,
    ChoiceLogprobs,
    Delta,
    ModelResponse,
    StreamingChoices,
    TopLogprob,
)

obj = ModelResponse(
    id="chat-f9bad6ec3c1146e99368682a0e7403fc",
    choices=[
        StreamingChoices(
            finish_reason=None,
            index=0,
            delta=Delta(content="", role=None, function_call=None, tool_calls=None),
            logprobs=ChoiceLogprobs(
                content=[
                    ChatCompletionTokenLogprob(
                        token="",
                        bytes=[],
                        logprob=-0.00018153927521780133,
                        top_logprobs=[
                            TopLogprob(
                                token="", bytes=[], logprob=-0.00018153927521780133
                            ),
                            TopLogprob(
                                token="\n\n", bytes=[10, 10], logprob=-9.062681198120117
                            ),
                        ],
                    )
                ]
            ),
        )
    ],
    created=1721976759,
    model="Meta-Llama-3-8B-Instruct",
    object="chat.completion.chunk",
    system_fingerprint=None,
)

print(obj.model_dump())
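The script only prints the model dump. A hedged sketch of how it could be wrapped as an actual pytest test, reusing the module-level obj and the json import above; the test name and assertions are assumptions, not part of the commit:

def test_streaming_chunk_serializes():
    # Mirror the updated serialization call in async_data_generator;
    # the point of the test is simply that this call does not raise.
    serialized = obj.model_dump_json(exclude_none=True, exclude_unset=True)
    data = json.loads(serialized)
    assert data["id"] == "chat-f9bad6ec3c1146e99368682a0e7403fc"
    assert data["object"] == "chat.completion.chunk"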