installation fixes

Hardik Shah 2024-08-05 18:04:36 -07:00
parent 0de5a807c7
commit 0a67f3d3e6
4 changed files with 38 additions and 26 deletions


@@ -7,9 +7,12 @@
 import argparse
 import os
 import shlex
+import textwrap

 import pkg_resources

+from termcolor import cprint
+
 from llama_toolchain.cli.subcommand import Subcommand
 from llama_toolchain.common.config_dirs import DISTRIBS_BASE_DIR
@@ -46,7 +49,7 @@ class DistributionInstall(Subcommand):
         )

     def _run_distribution_install_cmd(self, args: argparse.Namespace) -> None:
-        from llama_toolchain.common.exec import run_command, run_with_pty
+        from llama_toolchain.common.exec import run_with_pty
         from llama_toolchain.distribution.distribution import distribution_dependencies
         from llama_toolchain.distribution.registry import resolve_distribution
@@ -64,17 +67,26 @@ class DistributionInstall(Subcommand):
         os.makedirs(DISTRIBS_BASE_DIR / dist.name, exist_ok=True)
         deps = distribution_dependencies(dist)
-        run_with_pty([script, args.conda_env, " ".join(deps)])
+        return_code = run_with_pty([script, args.conda_env, " ".join(deps)])
+        assert return_code == 0, cprint(
+            f"Failed to install distribution {dist.name}", color="red"
+        )

         with open(DISTRIBS_BASE_DIR / dist.name / "conda.env", "w") as f:
             f.write(f"{args.conda_env}\n")

-        # we need to run configure _within_ the conda environment and need to run with
-        # a pty since configure is
-        python_exe = run_command(
-            shlex.split(f"conda run -n {args.conda_env} which python")
-        ).strip()
-        run_with_pty(
-            shlex.split(
-                f"{python_exe} -m llama_toolchain.cli.llama distribution configure --name {dist.name}"
-            )
-        )
+        cprint(
+            f"Distribution `{dist.name}` has been installed successfully!",
+            color="green",
+        )
+        print(
+            textwrap.dedent(
+                f"""
+                Update your conda environment and configure this distribution by running:
+                conda deactivate && conda activate {args.conda_env}
+                llama distribution configure --name {dist.name}
+                """
+            )
+        )
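
Note: the post-install flow added in this hunk can be read on its own as the sketch below. The finish_install helper, the example distribution name, and the final call are illustrative only and are not part of the commit; the cprint and textwrap.dedent calls mirror the added lines.

import textwrap

from termcolor import cprint


def finish_install(dist_name: str, conda_env: str, return_code: int) -> None:
    # Mirrors the added assert: complain in red and stop if the installer
    # script did not exit cleanly.
    if return_code != 0:
        cprint(f"Failed to install distribution {dist_name}", color="red")
        raise RuntimeError(f"install script exited with code {return_code}")

    cprint(f"Distribution `{dist_name}` has been installed successfully!", color="green")
    # textwrap.dedent strips the common leading whitespace, so the indented
    # f-string prints flush-left in the terminal.
    print(
        textwrap.dedent(
            f"""
            Update your conda environment and configure this distribution by running:
            conda deactivate && conda activate {conda_env}
            llama distribution configure --name {dist_name}
            """
        )
    )


# Example: report a successful install of a hypothetical "local" distribution.
finish_install("local", "llama-local-env", return_code=0)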


@@ -8,6 +8,10 @@
 set -euo pipefail

+# Define color codes
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
 error_handler() {
   echo "Error occurred in script at line: ${1}" >&2
   exit 1
@@ -23,7 +27,7 @@ ensure_conda_env_python310() {
   # Check if conda command is available
   if ! command -v conda &>/dev/null; then
-    echo "Error: conda command not found. Is Conda installed and in your PATH?" >&2
+    echo -e "${RED}Error: conda command not found. Is Conda installed and in your PATH?${NC}" >&2
     exit 1
   fi
@@ -50,6 +54,16 @@ ensure_conda_env_python310() {
     echo "Installing pip dependencies: $pip_dependencies"
     conda run -n "${env_name}" pip install $pip_dependencies
   fi
+
+  # Re-installing llama-toolchain in the new conda environment
+  if git rev-parse --is-inside-work-tree &> /dev/null; then
+    repo_root=$(git rev-parse --show-toplevel)
+    cd "$repo_root"
+    conda run -n "${env_name}" pip install -e .
+  else
+    echo -e "${RED}Not inside a Git repository. Please re-run from within llama-toolchain repository.${NC}"
+    exit 1
+  fi
 }

 if [ "$#" -ne 2 ]; then


@@ -29,7 +29,7 @@ COMMON_DEPENDENCIES = [
     "httpx",
     "huggingface-hub",
     "json-strong-typing",
-    "git+ssh://git@github.com/meta-llama/llama-models.git",
+    "llama-models",
     "omegaconf",
     "pandas",
     "Pillow",


@@ -1,30 +1,16 @@
accelerate
black==24.4.2
blobfile
codeshield
fairscale
fastapi
fire
flake8
httpx
huggingface-hub
json-strong-typing
llama-models
matplotlib
ollama
omegaconf
pandas
Pillow
pre-commit
pydantic==1.10.13
pydantic_core==2.18.2
python-dotenv
python-openapi
requests
tiktoken
torch
transformers
ufmt==2.7.0
usort==1.0.8
uvicorn
zmq