diff --git a/litellm/__init__.py b/litellm/__init__.py
index 2d5618bcd..45d768c33 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -1,3 +1,54 @@
+### GET HELP/SUPPORT MESSAGE ###
+import shutil, os
+show_get_help_message = True
+def print_box(message, link):
+    # get the longest line to set the box width
+    max_length = max(len(message), len(link))
+
+    # get terminal size
+    columns, rows = shutil.get_terminal_size(fallback=(80, 24))
+
+    # box width fits the longest line plus padding and border; height is fixed
+    width = max_length + 4  # adding 4 to account for the padding spaces and border
+    height = 20
+
+    # define left and top margins for centering the box
+    left_margin = (columns - width) // 2
+    top_margin = (rows - height) // 2
+
+    # create box border lines
+    horizontal_line = "+" + "-" * (width - 2) + "+"
+    empty_line = "|" + " " * (width - 2) + "|"
+
+    # print top margin
+    for _ in range(top_margin):
+        print()
+
+    # print top border
+    print(" " * left_margin + horizontal_line)
+
+    # print padding and message
+    print(" " * left_margin + empty_line)
+    print(" " * left_margin + empty_line)
+    print(" " * left_margin + "| " + message.center(max_length) + " |")
+    print(" " * left_margin + "| " + link.center(max_length) + " |")
+    print(" " * left_margin + empty_line)
+    print(" " * left_margin + empty_line)
+
+    # print bottom border
+    print(" " * left_margin + horizontal_line)
+
+    # print bottom margin
+    for _ in range(rows - height - top_margin):
+        print()
+
+
+message = "Add new LLMs: Give Feedback / Get Help"
+link = "https://github.com/BerriAI/litellm/issues/new"
+if show_get_help_message and not os.path.isfile('litellm_uuid.txt'):  # only show once
+    print_box(message, link)
+
+### INIT VARIABLES ###
 import threading, requests
 from typing import Callable, List, Optional, Dict, Union
 from litellm.caching import Cache
diff --git a/litellm/__pycache__/__init__.cpython-311.pyc b/litellm/__pycache__/__init__.cpython-311.pyc
index 5f5a1a503..05a7c09e2 100644
Binary files a/litellm/__pycache__/__init__.cpython-311.pyc and b/litellm/__pycache__/__init__.cpython-311.pyc differ
diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index b395309b6..396f89a3b 100644
Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ
diff --git a/litellm/__pycache__/timeout.cpython-311.pyc b/litellm/__pycache__/timeout.cpython-311.pyc
index 1f3bbe0d8..71dcdd313 100644
Binary files a/litellm/__pycache__/timeout.cpython-311.pyc and b/litellm/__pycache__/timeout.cpython-311.pyc differ
diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index 5f546c9e3..e0f896c0d 100644
Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ
diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index 980aa14f2..dad571310 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -15,7 +15,7 @@ from litellm import embedding, completion, text_completion, completion_cost
 user_message = "Write a short poem about the sky"
 messages = [{"content": user_message, "role": "user"}]
 
-
+print(f"os path: {os.path.isfile('litellm_uuid.txt')}")
 def logger_fn(user_model_dict):
     print(f"user_model_dict: {user_model_dict}")
 
@@ -48,7 +48,7 @@ def test_completion_claude():
         print(response.response_ms)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-# test_completion_claude()
+test_completion_claude()
 
 def test_completion_claude_max_tokens():
     try:
@@ -67,7 +67,7 @@ def test_completion_claude_max_tokens():
         print(response.response_ms)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-# test_completion_claude_max_tokens()
+test_completion_claude_max_tokens()
 
 # def test_completion_oobabooga():
 #     try:
diff --git a/pyproject.toml b/pyproject.toml
index 61e48ccda..4f2fe6b6b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.746"
+version = "0.1.747"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
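
Reviewer note: a minimal sketch (not part of the change) of how the new banner gate in litellm/__init__.py behaves. It assumes litellm is importable from this checkout and that litellm_uuid.txt is resolved relative to the current working directory, matching the os.path.isfile('litellm_uuid.txt') check in the diff; the script name is hypothetical.

    # check_banner.py - hypothetical helper; exercises the gate added in this diff
    import os

    if os.path.isfile("litellm_uuid.txt"):
        print("litellm_uuid.txt found: the help box is skipped on import")
    else:
        print("no litellm_uuid.txt: importing litellm prints the centered help box")

    import litellm  # module-level code calls print_box(message, link) when the gate is open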