fix(proxy_cli.py): check if model is passed in

This commit is contained in:
Krrish Dholakia 2023-10-07 07:51:36 -07:00
parent fbd3fa122d
commit f0c9c24925
2 changed files with 1 addition and 1 deletion

View file

@ -88,7 +88,7 @@ def run_server(port, api_base, model, deploy, debug, temperature, max_tokens, te
print(f"\033[32mLiteLLM: Test your URL using the following: \"litellm --test {url}\"\033[0m")
return
if "ollama" in model:
if model and "ollama" in model:
run_ollama_serve()
if test != False:
click.echo('LiteLLM: Making a test ChatCompletions request to your proxy')