From 4059f408d0fd4438f83cccaa26644fb282a10c86 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Mon, 9 Oct 2023 13:10:07 -0700
Subject: [PATCH] fix(proxy_cli): accept drop params and add_function_to_prompt

---
 litellm/proxy/proxy_cli.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/litellm/proxy/proxy_cli.py b/litellm/proxy/proxy_cli.py
index b39533e781..c732fcbe78 100644
--- a/litellm/proxy/proxy_cli.py
+++ b/litellm/proxy/proxy_cli.py
@@ -73,7 +73,7 @@ def open_config():
 @click.option('--config', is_flag=True, help='Create and open .env file from .env.template')
 @click.option('--test', flag_value=True, help='proxy chat completions url to make a test request to')
 @click.option('--local', is_flag=True, default=False, help='for local debugging')
-def run_server(host, port, api_base, model, deploy, debug, temperature, max_tokens, telemetry, config, test, local):
+def run_server(host, port, api_base, model, deploy, debug, temperature, max_tokens, drop_params, add_function_to_prompt, telemetry, config, test, local):
     if config:
         open_config()
 
@@ -127,7 +127,7 @@ def run_server(host, port, api_base, model, deploy, debug, temperature, max_toke
         return
     else:
         load_config()
-    initialize(model, api_base, debug, temperature, max_tokens, telemetry, )
+    initialize(model, api_base, debug, temperature, max_tokens, telemetry, drop_params, add_function_to_prompt)
 
     try:
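
Note (not part of the patch): the diff threads two new parameters, drop_params and add_function_to_prompt, from run_server into initialize(). Below is a minimal standalone sketch of how such parameters would typically be declared as Click flags and forwarded; the option names match the patch, but the help text, defaults, and the simplified run_server/initialize signatures here are assumptions for illustration, not the actual proxy_cli.py code.

    # sketch.py -- hypothetical, simplified illustration of the pattern in the patch
    import click

    def initialize(model, api_base, debug, temperature, max_tokens, telemetry,
                   drop_params, add_function_to_prompt):
        # Placeholder for the real initialize(); just echoes the new flags.
        print(f"drop_params={drop_params}, add_function_to_prompt={add_function_to_prompt}")

    @click.command()
    @click.option('--model', default=None, help='model to proxy (assumed option)')
    @click.option('--drop_params', is_flag=True, default=False,
                  help='drop unsupported params before calling the provider (assumed help text)')
    @click.option('--add_function_to_prompt', is_flag=True, default=False,
                  help='append function definitions to the prompt (assumed help text)')
    def run_server(model, drop_params, add_function_to_prompt):
        # Forward the CLI flags through to initialize(), mirroring the patched call.
        initialize(model, None, False, None, None, True, drop_params, add_function_to_prompt)

    if __name__ == '__main__':
        run_server()

Usage would then look like `python sketch.py --drop_params --add_function_to_prompt`, with Click mapping each flag onto the corresponding keyword argument of run_server.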