diff --git a/fastrepl-proxy b/fastrepl-proxy deleted file mode 160000 index f2fe83e00..000000000 --- a/fastrepl-proxy +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f2fe83e002a7c3ddedf4e500665644adfd31b9fc diff --git a/litellm/main.py b/litellm/main.py index 23cfb2c58..2d946283e 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -1123,3 +1123,6 @@ def config_completion(**kwargs): raise ValueError( "No config path set, please set a config path using `litellm.config_path = 'path/to/config.json'`" ) + +def stream_chunk_builder(chunks:list): + pass \ No newline at end of file diff --git a/litellm/tests/test_stream_chunk_builder.py b/litellm/tests/test_stream_chunk_builder.py new file mode 100644 index 000000000..fbfc78554 --- /dev/null +++ b/litellm/tests/test_stream_chunk_builder.py @@ -0,0 +1,18 @@ +from litellm import completion +import litellm +import os + +user_message = "Write a short poem about the sky" +messages = [{"content": user_message, "role": "user"}] + +def test_stream_chunk_builder(): + litellm.api_key = os.environ["OPENAI_API_KEY"] + response = completion( + model="gpt-3.5-turbo", + messages=messages, + stream=True + ) + + for chunk in response: + print(chunk) +test_stream_chunk_builder()