From f85b64b85cb3962a8851c2da00155239728b9adf Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Fri, 12 Jan 2024 17:15:15 -0800
Subject: [PATCH] (test) custom_llm_provider in streaming response

---
 litellm/tests/test_streaming.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/litellm/tests/test_streaming.py b/litellm/tests/test_streaming.py
index 0e80c55ed9..81d0815d69 100644
--- a/litellm/tests/test_streaming.py
+++ b/litellm/tests/test_streaming.py
@@ -262,6 +262,9 @@ def test_completion_azure_stream():
         for idx, init_chunk in enumerate(response):
             chunk, finished = streaming_format_tests(idx, init_chunk)
             complete_response += chunk
+            custom_llm_provider = init_chunk._hidden_params["custom_llm_provider"]
+            print(f"custom_llm_provider: {custom_llm_provider}")
+            assert custom_llm_provider == "azure"
             if finished:
                 assert isinstance(init_chunk.choices[0], litellm.utils.StreamingChoices)
                 break
@@ -923,6 +926,7 @@ def ai21_completion_call_bad_key():
 # ai21_completion_call_bad_key()
 
 
+@pytest.mark.asyncio
 async def test_hf_completion_tgi_stream():
     try: