From 44c1dedf64146faf61c5963071f51e198ac75bf2 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 6 May 2024 11:13:07 -0700
Subject: [PATCH] test fix - test_async_chat_openai_stream

---
 litellm/tests/test_custom_logger.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/litellm/tests/test_custom_logger.py b/litellm/tests/test_custom_logger.py
index 347fb57ba..9c2afe5a3 100644
--- a/litellm/tests/test_custom_logger.py
+++ b/litellm/tests/test_custom_logger.py
@@ -90,10 +90,10 @@ class TmpFunction:
         print(f"ON ASYNC LOGGING")
         self.async_success = True
         print(
-            f'kwargs.get("complete_streaming_response"): {kwargs.get("complete_streaming_response")}'
+            f'kwargs.get("async_complete_streaming_response"): {kwargs.get("async_complete_streaming_response")}'
         )
         self.complete_streaming_response_in_callback = kwargs.get(
-            "complete_streaming_response"
+            "async_complete_streaming_response"
         )


@@ -115,6 +115,10 @@ async def test_async_chat_openai_stream():
        print(complete_streaming_response)

        complete_streaming_response = complete_streaming_response.strip("'")
+
+        await asyncio.sleep(3)
+
+        # problematic line
        response1 = tmp_function.complete_streaming_response_in_callback["choices"][0][
            "message"
        ]["content"]