From 01fad944855a548c935d7ae9d54f4e67664b7d09 Mon Sep 17 00:00:00 2001
From: Kumaran Rajendhiran
Date: Fri, 24 Nov 2023 16:52:55 +0530
Subject: [PATCH] Fail gracefully if ollama is already being served

---
 litellm/proxy/proxy_server.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 1d1931fa74..1299318e25 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -227,10 +227,15 @@ def celery_setup(use_queue: bool):
         celery_app_conn = celery_app
 
 def run_ollama_serve():
-    command = ['ollama', 'serve']
-
-    with open(os.devnull, 'w') as devnull:
-        process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
+    try:
+        command = ['ollama', 'serve']
+
+        with open(os.devnull, 'w') as devnull:
+            process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
+    except Exception as e:
+        print(f"""
+            LiteLLM Warning: proxy started with `ollama` model\n`ollama serve` failed with Exception{e}. \nEnsure you run `ollama serve`
+        """)
 
 def load_router_config(router: Optional[litellm.Router], config_file_path: str):
     global master_key
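
For reference, below is a minimal standalone sketch of how `run_ollama_serve` reads once this patch is applied. The surrounding module context (proxy globals, imports elsewhere in `proxy_server.py`) is omitted, and only the imports the function itself needs are shown; the warning text is taken verbatim from the diff above.

```python
import os
import subprocess


def run_ollama_serve():
    try:
        command = ['ollama', 'serve']

        # Discard ollama's stdout/stderr so its logs don't interleave with the proxy's.
        with open(os.devnull, 'w') as devnull:
            process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
    except Exception as e:
        # Warn instead of crashing the proxy when `ollama serve` cannot be started,
        # e.g. because an ollama instance is already being served.
        print(f"""
            LiteLLM Warning: proxy started with `ollama` model\n`ollama serve` failed with Exception{e}. \nEnsure you run `ollama serve`
        """)
```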