(fix) proxy server LiteLLM warning

This commit is contained in:
ishaan-jaff 2023-11-21 08:50:31 -08:00
parent 8969c5f769
commit 2a35ff88a7

View file

@@ -18,10 +18,15 @@ import shutil
# Telemetry is disabled until explicitly configured.
telemetry = None
def run_ollama_serve():
    """Start `ollama serve` as a background process, discarding its output.

    Best-effort by design: if the `ollama` binary is missing or fails to
    spawn, a warning is printed instead of raising, so the proxy can keep
    starting. Returns None either way.
    """
    try:
        command = ['ollama', 'serve']
        # subprocess.DEVNULL replaces manually opening os.devnull: no
        # Python-level file object to open/close around the Popen call.
        process = subprocess.Popen(
            command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
        )
    except Exception as e:
        # Deliberate broad catch: spawning ollama is optional, never fatal.
        print(f"""
LiteLLM Warning: proxy started with `ollama` model\n`ollama serve` failed with Exception{e}. \nEnsure you run `ollama serve`
""")
def clone_subfolder(repo_url, subfolder, destination): def clone_subfolder(repo_url, subfolder, destination):
# Clone the full repo # Clone the full repo