fix(main.py): misrouting ollama models to nlp cloud

Krrish Dholakia 2023-11-14 18:55:01 -08:00
parent 465f427465
commit 1738341dcb
5 changed files with 94 additions and 47 deletions


@@ -113,7 +113,6 @@ def run_server(host, port, api_base, api_version, model, alias, add_key, headers
         print("\033[1;32mDone successfully\033[0m")
         return
     if model and "ollama" in model:
-        print(f"ollama called")
         run_ollama_serve()
     if test != False:
         click.echo('\nLiteLLM: Making a test ChatCompletions request to your proxy')
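
For context, the commit title refers to provider routing in main.py (not shown in this hunk): model strings containing "ollama" were falling through to the nlp_cloud branch. Below is a minimal sketch of the intended routing order; the function name, model list, and default fallback are illustrative assumptions, not litellm's actual implementation.

# Illustrative sketch only -- names and the model catalog are assumptions, not litellm's API.
NLP_CLOUD_MODELS = {"dolphin", "chatdolphin"}  # assumed example catalog

def resolve_provider(model: str) -> str:
    """Route a model string to a provider, checking ollama before nlp_cloud."""
    # An explicit "ollama" marker in the model name should win outright ...
    if model.startswith("ollama/") or "ollama" in model:
        return "ollama"
    # ... and only models actually in the nlp_cloud catalog should route there.
    if model in NLP_CLOUD_MODELS:
        return "nlp_cloud"
    return "openai"  # assumed default fallback

assert resolve_provider("ollama/llama2") == "ollama"
assert resolve_provider("dolphin") == "nlp_cloud"

The point of the ordering is simply that the ollama check runs before any catalog lookup, so an ollama model can never be misread as an nlp_cloud one.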