bump: version 0.13.1 → 0.13.2.dev1

Krrish Dholakia 2023-11-05 21:12:13 -08:00
parent 2e72efddd6
commit 21ae940992
6 changed files with 29 additions and 19 deletions

litellm/__init__.py

@@ -383,4 +383,3 @@ from .exceptions import (
 from .budget_manager import BudgetManager
 from .proxy.proxy_cli import run_server
 from .router import Router
-from .proxy.proxy_server import app

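With this import gone, "import litellm" no longer loads the FastAPI proxy app as a side effect. A minimal sketch to verify the decoupling, assuming litellm is installed and nothing else imports the proxy eagerly:

    import sys

    import litellm  # noqa: F401

    # If __init__.py still imported the proxy app eagerly, its module would
    # already be in sys.modules right after "import litellm".
    assert "litellm.proxy.proxy_server" not in sys.modules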
litellm/proxy/proxy_cli.py

@@ -177,7 +177,7 @@ def run_server(host, port, api_base, api_version, model, alias, add_key, headers
     if port == 8000 and is_port_in_use(port):
         port = random.randint(1024, 49152)
     print(os.listdir(os.getcwd()))
-    uvicorn.run("litellm:app", host=host, port=port, workers=num_workers)
+    uvicorn.run("litellm.proxy.proxy_server:app", host=host, port=port, workers=num_workers)

 if __name__ == "__main__":

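This pairs with the __init__.py change above: the uvicorn target moves from the package root ("litellm:app", which only resolved while __init__.py re-exported the app) to the module that actually defines it. A standalone sketch of the same mechanism, not LiteLLM's code; the file name "myserver.py" and the /health route are assumptions:

    # save as myserver.py
    import uvicorn
    from fastapi import FastAPI

    app = FastAPI()

    @app.get("/health")
    def health():
        return {"status": "ok"}

    if __name__ == "__main__":
        # With workers > 1, uvicorn needs an import string rather than the
        # app object itself, so each worker process can re-import the app.
        uvicorn.run("myserver:app", host="0.0.0.0", port=8000, workers=2)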
litellm/proxy/proxy_server.py

@@ -74,10 +74,6 @@ def generate_feedback_box():
     print(" Thank you for using LiteLLM! - Krrish & Ishaan")
     print()
     print()
-
-
-generate_feedback_box()
-
 print()
 print(
     "\033[1;31mGive Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new\033[0m"
@@ -360,6 +356,7 @@ def initialize(
     config
 ):
     global user_model, user_api_base, user_debug, user_max_tokens, user_request_timeout, user_temperature, user_telemetry, user_headers, llm_model_list, llm_router, server_settings
+    generate_feedback_box()
     user_model = model
     user_debug = debug
     dynamic_config = {"general": {}, user_model: {}}

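Taken together, the two hunks move the feedback banner from import time into initialize(): importing proxy_server is now silent, and the box prints only when the proxy actually starts. A generic sketch of the pattern, with illustrative names rather than LiteLLM's:

    def banner():
        print("*** thanks for trying the proxy ***")

    # Before: banner() was called here at module level, so it ran as a side
    # effect of any import of this module.

    def initialize():
        # After: the side effect runs only when the caller explicitly
        # initializes the server.
        banner()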
(request-log JSON; file path not shown in this view)

@@ -37,5 +37,19 @@
       },
       "log_event_type": "pre_api_call"
     }
-  }
-}
+  },
+  "20231105211050527183": {
+    "pre_api_call": {
+      "model": "togethercomputer/llama-2-70b-chat",
+      "messages": [
+        {
+          "content": "Write a short poem about the sky",
+          "role": "user"
+        }
+      ],
+      "optional_params": {},
+      "litellm_params": {
+        "return_async": false,
+        "api_key": null,
+        "force_timeout": 600,
+        "logger_fn":

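The hunk above appends a new entry, keyed by a timestamp string, to the JSON request log; the diff view truncates it mid-entry. A hedged sketch of reading the entry back, where the file name "api_log.json" is an assumption since the real path is not shown here:

    import json

    # "api_log.json" is an assumed name; the diff does not show the real path.
    with open("api_log.json") as f:
        logs = json.load(f)

    entry = logs["20231105211050527183"]["pre_api_call"]
    print(entry["model"])                   # togethercomputer/llama-2-70b-chat
    print(entry["messages"][0]["content"])  # Write a short poem about the sky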
litellm/tests/test_completion.py

@@ -36,7 +36,7 @@ def test_completion_custom_provider_model_name():
         pytest.fail(f"Error occurred: {e}")

-# test_completion_custom_provider_model_name()
+test_completion_custom_provider_model_name()


 def test_completion_claude():

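Note that the uncommented call sits at module level, so the test now also runs whenever the file is imported (including during pytest collection), not only when pytest invokes test_completion_custom_provider_model_name as a collected test.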
pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.13.1"
+version = "0.13.2.dev1"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
@@ -26,7 +26,7 @@ requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

 [tool.commitizen]
-version = "0.13.1"
+version = "0.13.2.dev1"
 version_files = [
     "pyproject.toml:^version"
 ]
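Under PEP 440, the ".dev1" suffix marks a development pre-release that sorts after the published 0.13.1 and before the eventual 0.13.2. A quick check, assuming the third-party "packaging" library is installed:

    from packaging.version import Version

    v = Version("0.13.2.dev1")
    assert v.is_prerelease and v.dev == 1
    assert Version("0.13.1") < v < Version("0.13.2")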