Fail gracefully if ollama is already being served

Kumaran Rajendhiran 2023-11-24 16:52:55 +05:30
parent 824136667f
commit 01fad94485


@@ -227,10 +227,15 @@ def celery_setup(use_queue: bool):
     celery_app_conn = celery_app
 def run_ollama_serve():
-    command = ['ollama', 'serve']
-    with open(os.devnull, 'w') as devnull:
-        process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
+    try:
+        command = ['ollama', 'serve']
+        with open(os.devnull, 'w') as devnull:
+            process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
+    except Exception as e:
+        print(f"""
+            LiteLLM Warning: proxy started with `ollama` model\n`ollama serve` failed with Exception{e}. \nEnsure you run `ollama serve`
+        """)
 def load_router_config(router: Optional[litellm.Router], config_file_path: str):
     global master_key
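
Taken on its own, the function after this change looks roughly like the sketch below (imports added here for completeness; the body mirrors the added lines in the diff). The try/except means that a failure to spawn the subprocess, for example when the `ollama` binary is not on PATH, prints a warning instead of raising and taking the proxy startup down with it.

import os
import subprocess

def run_ollama_serve():
    try:
        # Spawn `ollama serve` in the background and discard its output.
        command = ['ollama', 'serve']
        with open(os.devnull, 'w') as devnull:
            process = subprocess.Popen(command, stdout=devnull, stderr=devnull)
    except Exception as e:
        # Spawning failed; warn the user rather than crashing the proxy.
        print(f"""
            LiteLLM Warning: proxy started with `ollama` model\n`ollama serve` failed with Exception{e}. \nEnsure you run `ollama serve`
        """)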