From cd0557c0532bc95706dbd43cc411fca90a09d63a Mon Sep 17 00:00:00 2001 From: ishaan-jaff Date: Sat, 14 Oct 2023 12:32:32 -0700 Subject: [PATCH] (feat) add swagger docs to cli config --- litellm/proxy/proxy_cli.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/litellm/proxy/proxy_cli.py b/litellm/proxy/proxy_cli.py index d8bbc9dace..e1f3fc5879 100644 --- a/litellm/proxy/proxy_cli.py +++ b/litellm/proxy/proxy_cli.py @@ -105,7 +105,7 @@ def is_port_in_use(port): @click.option('--test', flag_value=True, help='proxy chat completions url to make a test request to') @click.option('--local', is_flag=True, default=False, help='for local debugging') @click.option('--cost', is_flag=True, default=False, help='for viewing cost logs') -def run_server(host, port, api_base, model, add_key, deploy, debug, temperature, max_tokens, drop_params, create_proxy, add_function_to_prompt, config, file, max_budget, telemetry, logs, test, local, cost): +def run_server(host, port, api_base, model, add_key, deploy, debug, temperature, max_tokens, drop_params, create_proxy, add_function_to_prompt, config, file, max_budget, telemetry, logs, test, local, cost): global feature_telemetry args = locals() if local: @@ -206,6 +206,7 @@ def run_server(host, port, api_base, model, add_key, deploy, debug, temperature, except: raise ImportError("Uvicorn needs to be imported. Run - `pip install uvicorn`") print(f"\033[32mLiteLLM: Test your local endpoint with: \"litellm --test\" [In a new terminal tab]\033[0m\n") + print(f"\033[32mLiteLLM: View available endpoints for this server on: http://{host}:{port}\033[0m\n") print(f"\033[32mLiteLLM: Deploy your proxy using the following: \"litellm --model claude-instant-1 --deploy\" Get an https://api.litellm.ai/chat/completions endpoint \033[0m\n") if port == 8000 and is_port_in_use(port):