From bccbb0852d614f300914e60e4b246a810728f9c4 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Mon, 22 Jan 2024 21:57:26 -0800
Subject: [PATCH] (test) test_completion_sagemaker_stream

---
 litellm/tests/test_completion.py | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index 644b348ec..43ffd2b0a 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -1394,6 +1394,30 @@ def test_completion_sagemaker():
 # test_completion_sagemaker()
 
 
+def test_completion_sagemaker_stream():
+    try:
+        litellm.set_verbose = False
+        print("testing sagemaker")
+        response = completion(
+            model="sagemaker/berri-benchmarking-Llama-2-70b-chat-hf-4",
+            messages=messages,
+            temperature=0.2,
+            max_tokens=80,
+            stream=True,
+        )
+
+        complete_streaming_response = ""
+
+        for chunk in response:
+            print(chunk)
+            complete_streaming_response += chunk.choices[0].delta.content or ""
+        # Add any assertions here to check the response
+        # print(response)
+        assert len(complete_streaming_response) > 0
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
 def test_completion_chat_sagemaker():
     try:
         messages = [{"role": "user", "content": "Hey, how's it going?"}]