From 1f18093b631b96bd9084ae77523659d7753692bd Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Mon, 11 Dec 2023 17:41:47 -0800
Subject: [PATCH] test(test_custom_logger.py): reset cache test correctly

---
 .gitignore                                   |  1 +
 litellm/tests/test_custom_callback_router.py | 21 +++++++++++---------
 litellm/tests/test_custom_logger.py          |  8 ++------
 3 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/.gitignore b/.gitignore
index 088996ddd..d0f88d972 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,3 +19,4 @@ litellm/proxy/_secret_config.yaml
 litellm/tests/aiologs.log
 litellm/tests/exception_data.txt
 litellm/tests/config_*.yaml
+litellm/tests/langfuse.log
diff --git a/litellm/tests/test_custom_callback_router.py b/litellm/tests/test_custom_callback_router.py
index 00e5fd308..d9f67d6e3 100644
--- a/litellm/tests/test_custom_callback_router.py
+++ b/litellm/tests/test_custom_callback_router.py
@@ -258,10 +258,10 @@ class CompletionCustomHandler(CustomLogger): # https://docs.litellm.ai/docs/obse
 @pytest.mark.asyncio
 async def test_async_chat_azure():
     try:
-        customHandler = CompletionCustomHandler()
-        customHandler_streaming = CompletionCustomHandler()
+        customHandler_completion_azure_router = CompletionCustomHandler()
+        customHandler_streaming_azure_router = CompletionCustomHandler()
         customHandler_failure = CompletionCustomHandler()
-        litellm.callbacks = [customHandler]
+        litellm.callbacks = [customHandler_completion_azure_router]
         model_list = [
             {
                 "model_name": "gpt-3.5-turbo", # openai model name
@@ -282,10 +282,10 @@ async def test_async_chat_azure():
                                 "content": "Hi 👋 - i'm openai"
                             }])
         await asyncio.sleep(2)
-        assert len(customHandler.errors) == 0
-        assert len(customHandler.states) == 3 # pre, post, success
+        assert len(customHandler_completion_azure_router.errors) == 0
+        assert len(customHandler_completion_azure_router.states) == 3 # pre, post, success
         # streaming
-        litellm.callbacks = [customHandler_streaming]
+        litellm.callbacks = [customHandler_streaming_azure_router]
         router2 = Router(model_list=model_list) # type: ignore
         response = await router2.acompletion(model="gpt-3.5-turbo",
                             messages=[{
@@ -294,11 +294,12 @@ async def test_async_chat_azure():
                             }],
                             stream=True)
         async for chunk in response:
+            print(f"async azure router chunk: {chunk}")
             continue
         await asyncio.sleep(1)
-        print(f"customHandler.states: {customHandler_streaming.states}")
-        assert len(customHandler_streaming.errors) == 0
-        assert len(customHandler_streaming.states) >= 4 # pre, post, stream (multiple times), success
+        print(f"customHandler.states: {customHandler_streaming_azure_router.states}")
+        assert len(customHandler_streaming_azure_router.errors) == 0
+        assert len(customHandler_streaming_azure_router.states) >= 4 # pre, post, stream (multiple times), success
         # failure
         model_list = [
             {
@@ -396,6 +397,7 @@ async def test_async_embedding_azure():
 async def test_async_chat_azure_with_fallbacks():
     try:
         customHandler_fallbacks = CompletionCustomHandler()
+        litellm.callbacks = [customHandler_fallbacks]
         # with fallbacks
         model_list = [
             {
@@ -428,6 +430,7 @@ async def test_async_chat_azure_with_fallbacks():
         print(f"customHandler_fallbacks.states: {customHandler_fallbacks.states}")
         assert len(customHandler_fallbacks.errors) == 0
         assert len(customHandler_fallbacks.states) == 6 # pre, post, failure, pre, post, success
+        litellm.callbacks = []
     except Exception as e:
         print(f"Assertion Error: {traceback.format_exc()}")
         pytest.fail(f"An exception occurred - {str(e)}")
diff --git a/litellm/tests/test_custom_logger.py b/litellm/tests/test_custom_logger.py
index 2df5e0f76..7ae2df729 100644
--- a/litellm/tests/test_custom_logger.py
+++ b/litellm/tests/test_custom_logger.py
@@ -205,7 +205,6 @@ def test_azure_completion_stream():
         assert response_in_success_handler == complete_streaming_response
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-test_azure_completion_stream()
 
 def test_async_custom_handler():
     try:
@@ -316,13 +315,10 @@ def test_redis_cache_completion_stream():
         print("\nresponse 2", response_2_content)
         assert response_1_content == response_2_content, f"Response 1 != Response 2. Same params, Response 1{response_1_content} != Response 2{response_2_content}"
         litellm.success_callback = []
+        litellm._async_success_callback = []
         litellm.cache = None
     except Exception as e:
         print(e)
         litellm.success_callback = []
         raise e
-    """
-
-    1 & 2 should be exactly the same
-    """
-# test_redis_cache_completion_stream()
\ No newline at end of file
+        pytest.fail(f"Error occurred: {e}")
\ No newline at end of file