From 21ae94099291a45192c22f73fcd4cc82e6b45a53 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Sun, 5 Nov 2023 21:12:13 -0800
Subject: [PATCH] =?UTF-8?q?bump:=20version=200.13.1=20=E2=86=92=200.13.2.d?=
 =?UTF-8?q?ev1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 litellm/__init__.py              |  1 -
 litellm/proxy/proxy_cli.py       |  2 +-
 litellm/proxy/proxy_server.py    | 19 ++++++++-----------
 litellm/tests/api_log.json       | 18 ++++++++++++++++--
 litellm/tests/test_completion.py |  4 ++--
 pyproject.toml                   |  4 ++--
 6 files changed, 29 insertions(+), 19 deletions(-)

diff --git a/litellm/__init__.py b/litellm/__init__.py
index fcf64f817..9d995c222 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -383,4 +383,3 @@ from .exceptions import (
 from .budget_manager import BudgetManager
 from .proxy.proxy_cli import run_server
 from .router import Router
-from .proxy.proxy_server import app
diff --git a/litellm/proxy/proxy_cli.py b/litellm/proxy/proxy_cli.py
index 59303a9fc..fef243fb9 100644
--- a/litellm/proxy/proxy_cli.py
+++ b/litellm/proxy/proxy_cli.py
@@ -177,7 +177,7 @@ def run_server(host, port, api_base, api_version, model, alias, add_key, headers
         if port == 8000 and is_port_in_use(port):
             port = random.randint(1024, 49152)
         print(os.listdir(os.getcwd()))
-        uvicorn.run("litellm:app", host=host, port=port, workers=num_workers)
+        uvicorn.run("litellm.proxy.proxy_server:app", host=host, port=port, workers=num_workers)
 
 
 if __name__ == "__main__":
diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 462eed034..4338aaaed 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -74,17 +74,13 @@ def generate_feedback_box():
     print(" Thank you for using LiteLLM! - Krrish & Ishaan")
     print()
     print()
-
-
-generate_feedback_box()
-
-print()
-print(
-    "\033[1;31mGive Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new\033[0m"
-)
-print()
-print("\033[1;34mDocs: https://docs.litellm.ai/docs/proxy_server\033[0m")
-print()
+    print()
+    print(
+        "\033[1;31mGive Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new\033[0m"
+    )
+    print()
+    print("\033[1;34mDocs: https://docs.litellm.ai/docs/proxy_server\033[0m")
+    print()
 
 import litellm
 litellm.suppress_debug_info = True
@@ -360,6 +356,7 @@ def initialize(
     config
 ):
     global user_model, user_api_base, user_debug, user_max_tokens, user_request_timeout, user_temperature, user_telemetry, user_headers, llm_model_list, llm_router, server_settings
+    generate_feedback_box()
     user_model = model
     user_debug = debug
     dynamic_config = {"general": {}, user_model: {}}
diff --git a/litellm/tests/api_log.json b/litellm/tests/api_log.json
index 7eeb481d5..354761766 100644
--- a/litellm/tests/api_log.json
+++ b/litellm/tests/api_log.json
@@ -37,5 +37,19 @@
       },
       "log_event_type": "pre_api_call"
     }
-  }
-}
\ No newline at end of file
+  },
+  "20231105211050527183": {
+    "pre_api_call": {
+      "model": "togethercomputer/llama-2-70b-chat",
+      "messages": [
+        {
+          "content": "Write a short poem about the sky",
+          "role": "user"
+        }
+      ],
+      "optional_params": {},
+      "litellm_params": {
+        "return_async": false,
+        "api_key": null,
+        "force_timeout": 600,
+        "logger_fn":
\ No newline at end of file
diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index 340a3b0ae..f29aad38f 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -36,7 +36,7 @@ def test_completion_custom_provider_model_name():
         pytest.fail(f"Error occurred: {e}")
 
 
-# test_completion_custom_provider_model_name()
+test_completion_custom_provider_model_name()
 
 
 def test_completion_claude():
@@ -1384,4 +1384,4 @@ def test_moderation():
     print(output)
     return output
 
-# test_moderation()
+# test_moderation()
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 0c10050a0..642c9397d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.13.1"
+version = "0.13.2.dev1"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
@@ -26,7 +26,7 @@ requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
 
 [tool.commitizen]
-version = "0.13.1"
+version = "0.13.2.dev1"
 version_files = [
     "pyproject.toml:^version"
 ]
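
Note: the proxy_cli.py hunk changes the uvicorn target from "litellm:app" to "litellm.proxy.proxy_server:app", matching the removal of "from .proxy.proxy_server import app" in litellm/__init__.py; the package root no longer re-exports the FastAPI app, so uvicorn must import it from the module that defines it. A minimal sketch of the equivalent launch call follows, assuming uvicorn and litellm are installed; the host, port, and workers values are illustrative placeholders rather than the CLI's real option handling.

    # Sketch: start the LiteLLM proxy the way proxy_cli.py does after this patch.
    # The import string points uvicorn at the module that defines the FastAPI app.
    import uvicorn

    uvicorn.run(
        "litellm.proxy.proxy_server:app",  # "module.path:attribute" target from this diff
        host="0.0.0.0",                    # illustrative; the CLI forwards its host option
        port=8000,                         # illustrative; the CLI picks a random port if 8000 is in use
        workers=1,                         # illustrative; the CLI forwards num_workers
    )

Passing an import string rather than the app object is also what allows uvicorn to spawn multiple workers, since each worker process re-imports the app from that path.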