From 0a67f3d3e667c65290a35b0ea08600965f356148 Mon Sep 17 00:00:00 2001
From: Hardik Shah
Date: Mon, 5 Aug 2024 18:04:36 -0700
Subject: [PATCH] installation fixes

---
 llama_toolchain/cli/distribution/install.py   | 32 +++++++++++++------
 .../distribution/install_distribution.sh      | 16 +++++++++-
 llama_toolchain/distribution/registry.py      |  2 +-
 requirements.txt                              | 14 --------
 4 files changed, 38 insertions(+), 26 deletions(-)

diff --git a/llama_toolchain/cli/distribution/install.py b/llama_toolchain/cli/distribution/install.py
index 1fde93bca..3679f8786 100644
--- a/llama_toolchain/cli/distribution/install.py
+++ b/llama_toolchain/cli/distribution/install.py
@@ -7,9 +7,12 @@
 import argparse
 import os
 import shlex
+import textwrap
 
 import pkg_resources
 
+from termcolor import cprint
+
 from llama_toolchain.cli.subcommand import Subcommand
 from llama_toolchain.common.config_dirs import DISTRIBS_BASE_DIR
 
@@ -46,7 +49,7 @@ class DistributionInstall(Subcommand):
         )
 
     def _run_distribution_install_cmd(self, args: argparse.Namespace) -> None:
-        from llama_toolchain.common.exec import run_command, run_with_pty
+        from llama_toolchain.common.exec import run_with_pty
         from llama_toolchain.distribution.distribution import distribution_dependencies
         from llama_toolchain.distribution.registry import resolve_distribution
 
@@ -64,17 +67,26 @@ class DistributionInstall(Subcommand):
         os.makedirs(DISTRIBS_BASE_DIR / dist.name, exist_ok=True)
 
         deps = distribution_dependencies(dist)
-        run_with_pty([script, args.conda_env, " ".join(deps)])
+        return_code = run_with_pty([script, args.conda_env, " ".join(deps)])
+
+        assert return_code == 0, cprint(
+            f"Failed to install distribution {dist.name}", color="red"
+        )
+
         with open(DISTRIBS_BASE_DIR / dist.name / "conda.env", "w") as f:
             f.write(f"{args.conda_env}\n")
 
-        # we need to run configure _within_ the conda environment and need to run with
-        # a pty since configure is
-        python_exe = run_command(
-            shlex.split(f"conda run -n {args.conda_env} which python")
-        ).strip()
-        run_with_pty(
-            shlex.split(
-                f"{python_exe} -m llama_toolchain.cli.llama distribution configure --name {dist.name}"
+        cprint(
+            f"Distribution `{dist.name}` has been installed successfully!",
+            color="green",
+        )
+        print(
+            textwrap.dedent(
+                f"""
+                Update your conda environment and configure this distribution by running:
+
+                conda deactivate && conda activate {args.conda_env}
+                llama distribution configure --name {dist.name}
+                """
             )
         )
diff --git a/llama_toolchain/distribution/install_distribution.sh b/llama_toolchain/distribution/install_distribution.sh
index 0707f4d6b..975c205d0 100755
--- a/llama_toolchain/distribution/install_distribution.sh
+++ b/llama_toolchain/distribution/install_distribution.sh
@@ -8,6 +8,10 @@
 set -euo pipefail
 
+# Define color codes
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
 error_handler() {
   echo "Error occurred in script at line: ${1}" >&2
   exit 1
 }
@@ -23,7 +27,7 @@ ensure_conda_env_python310() {
 
   # Check if conda command is available
   if ! command -v conda &>/dev/null; then
-    echo "Error: conda command not found. Is Conda installed and in your PATH?" >&2
+    echo -e "${RED}Error: conda command not found. Is Conda installed and in your PATH?${NC}" >&2
     exit 1
   fi
 
@@ -50,6 +54,16 @@ ensure_conda_env_python310() {
     echo "Installing pip dependencies: $pip_dependencies"
     conda run -n "${env_name}" pip install $pip_dependencies
   fi
+
+  # Re-installing llama-toolchain in the new conda environment
+  if git rev-parse --is-inside-work-tree &> /dev/null; then
+    repo_root=$(git rev-parse --show-toplevel)
+    cd "$repo_root"
+    conda run -n "${env_name}" pip install -e .
+  else
+    echo -e "${RED}Not inside a Git repository. Please re-run from within llama-toolchain repository.${NC}"
+    exit 1
+  fi
 }
 
 if [ "$#" -ne 2 ]; then
diff --git a/llama_toolchain/distribution/registry.py b/llama_toolchain/distribution/registry.py
index ea046f5a1..54ff74c62 100644
--- a/llama_toolchain/distribution/registry.py
+++ b/llama_toolchain/distribution/registry.py
@@ -29,7 +29,7 @@ COMMON_DEPENDENCIES = [
     "httpx",
     "huggingface-hub",
     "json-strong-typing",
-    "git+ssh://git@github.com/meta-llama/llama-models.git",
+    "llama-models",
     "omegaconf",
     "pandas",
     "Pillow",
diff --git a/requirements.txt b/requirements.txt
index fa78213bf..726233036 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,30 +1,16 @@
-accelerate
 black==24.4.2
-blobfile
-codeshield
-fairscale
 fastapi
 fire
 flake8
 httpx
 huggingface-hub
 json-strong-typing
-llama-models
-matplotlib
-ollama
 omegaconf
-pandas
-Pillow
 pre-commit
 pydantic==1.10.13
 pydantic_core==2.18.2
-python-dotenv
 python-openapi
 requests
-tiktoken
-torch
-transformers
 ufmt==2.7.0
 usort==1.0.8
 uvicorn
-zmq