fix(litellm_logging.py): fix calling success callback w/ stream_options true

Fixes https://github.com/BerriAI/litellm/issues/5118
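For context (not shown in the diff below): when stream_options={"include_usage": True} is set, OpenAI ends the stream with an extra chunk whose choices list is empty and whose usage field is populated. A handler that detects stream completion via chunk.choices[0].finish_reason alone can either break on the empty list or fire the success callback more than once. A minimal sketch of the invariant the new test pins down, exactly one success callback per stream (illustrative helper only, not the actual litellm_logging.py change; all names here are assumptions):

    class StreamLogger:
        """Hypothetical stream logger; names and structure are assumptions."""

        def __init__(self, on_success):
            self.on_success = on_success
            self._fired = False

        def log_chunk(self, chunk):
            # Usage-only terminal chunk: empty choices, populated usage.
            usage_only = not chunk.choices and getattr(chunk, "usage", None) is not None
            finished = usage_only or (
                len(chunk.choices) > 0 and chunk.choices[0].finish_reason is not None
            )
            if finished and not self._fired:
                self._fired = True  # guarantee a single success callback
                self.on_success(getattr(chunk, "usage", None))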
Krrish Dholakia 2024-08-09 18:20:42 -07:00
parent 9f0a05d406
commit a26b23a3f4
4 changed files with 50 additions and 28 deletions

@@ -14,6 +14,7 @@ from pydantic import BaseModel
sys.path.insert(0, os.path.abspath("../.."))
from typing import List, Literal, Optional, Union
from unittest.mock import AsyncMock, MagicMock, patch
import litellm
from litellm import Cache, completion, embedding
@@ -518,6 +519,29 @@ async def test_async_chat_azure_stream():
# asyncio.run(test_async_chat_azure_stream())
@pytest.mark.asyncio
async def test_async_chat_openai_stream_options():
    try:
        customHandler = CompletionCustomHandler()
        litellm.callbacks = [customHandler]
        with patch.object(
            customHandler, "async_log_success_event", new=AsyncMock()
        ) as mock_client:
            response = await litellm.acompletion(
                model="gpt-3.5-turbo",
                messages=[{"role": "user", "content": "Hi 👋 - i'm async openai"}],
                stream=True,
                stream_options={"include_usage": True},
            )
            # Drain the stream so the final usage-only chunk is processed.
            async for chunk in response:
                continue

            # The success callback must fire exactly once per completion.
            mock_client.assert_awaited_once()
    except Exception as e:
        pytest.fail(f"An exception occurred: {str(e)}")
## Test Bedrock + sync
def test_chat_bedrock_stream():
    try:
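
To run just the new test locally, something like pytest -k test_async_chat_openai_stream_options should work (the exact test file path is not shown in this excerpt).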