Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 15:23:51 +00:00)

commit 0a67f3d3e6 (parent 0de5a807c7)
installation fixes

4 changed files with 38 additions and 26 deletions

@@ -7,9 +7,12 @@
 import argparse
 import os
 import shlex
+import textwrap
 
 import pkg_resources
 
+from termcolor import cprint
+
 from llama_toolchain.cli.subcommand import Subcommand
 from llama_toolchain.common.config_dirs import DISTRIBS_BASE_DIR
 
@@ -46,7 +49,7 @@ class DistributionInstall(Subcommand):
         )
 
     def _run_distribution_install_cmd(self, args: argparse.Namespace) -> None:
-        from llama_toolchain.common.exec import run_command, run_with_pty
+        from llama_toolchain.common.exec import run_with_pty
         from llama_toolchain.distribution.distribution import distribution_dependencies
         from llama_toolchain.distribution.registry import resolve_distribution
 
@@ -64,17 +67,26 @@ class DistributionInstall(Subcommand):
         os.makedirs(DISTRIBS_BASE_DIR / dist.name, exist_ok=True)
 
         deps = distribution_dependencies(dist)
-        run_with_pty([script, args.conda_env, " ".join(deps)])
+        return_code = run_with_pty([script, args.conda_env, " ".join(deps)])
+
+        assert return_code == 0, cprint(
+            f"Failed to install distribution {dist.name}", color="red"
+        )
 
         with open(DISTRIBS_BASE_DIR / dist.name / "conda.env", "w") as f:
             f.write(f"{args.conda_env}\n")
 
-        # we need to run configure _within_ the conda environment and need to run with
-        # a pty since configure is
-        python_exe = run_command(
-            shlex.split(f"conda run -n {args.conda_env} which python")
-        ).strip()
-        run_with_pty(
-            shlex.split(
-                f"{python_exe} -m llama_toolchain.cli.llama distribution configure --name {dist.name}"
+        cprint(
+            f"Distribution `{dist.name}` has been installed successfully!",
+            color="green",
+        )
+        print(
+            textwrap.dedent(
+                f"""
+                Update your conda environment and configure this distribution by running:
+
+                conda deactivate && conda activate {args.conda_env}
+                llama distribution configure --name {dist.name}
+                """
             )
         )
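
The change above hinges on run_with_pty surfacing the install script's exit status, which the new assert then checks. For orientation, here is a minimal, hypothetical sketch of a PTY runner with that contract. It is not the project's actual llama_toolchain.common.exec.run_with_pty (whose implementation is not part of this diff); it only illustrates the return-code behavior the CLI now relies on.

import os
import pty


def run_with_pty(command: list[str]) -> int:
    # Hypothetical stand-in, not the project's helper: run the command on a
    # pseudo-terminal connected to the current terminal.
    status = pty.spawn(command)
    # pty.spawn returns the raw waitpid status; convert it to a shell-style exit code.
    return os.waitstatus_to_exitcode(status)


if __name__ == "__main__":
    # A failing child process yields a non-zero code, which is what the
    # `assert return_code == 0` in the diff above guards against.
    return_code = run_with_pty(["bash", "-c", "exit 3"])
    print("exit code:", return_code)  # -> 3
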
@@ -8,6 +8,10 @@
 
 set -euo pipefail
 
+# Define color codes
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
 error_handler() {
   echo "Error occurred in script at line: ${1}" >&2
   exit 1
@@ -23,7 +27,7 @@ ensure_conda_env_python310() {
 
   # Check if conda command is available
   if ! command -v conda &>/dev/null; then
-    echo "Error: conda command not found. Is Conda installed and in your PATH?" >&2
+    echo -e "${RED}Error: conda command not found. Is Conda installed and in your PATH?${NC}" >&2
     exit 1
   fi
 
@@ -50,6 +54,16 @@ ensure_conda_env_python310() {
     echo "Installing pip dependencies: $pip_dependencies"
     conda run -n "${env_name}" pip install $pip_dependencies
   fi
+
+  # Re-installing llama-toolchain in the new conda environment
+  if git rev-parse --is-inside-work-tree &> /dev/null; then
+    repo_root=$(git rev-parse --show-toplevel)
+    cd "$repo_root"
+    conda run -n "${env_name}" pip install -e .
+  else
+    echo -e "${RED}Not inside a Git repository. Please re-run from within llama-toolchain repository.${NC}"
+    exit 1
+  fi
 }
 
 if [ "$#" -ne 2 ]; then
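
A note on how the two sides fit together: per the CLI hunk earlier, this script is invoked with exactly two arguments, the conda env name and a single space-joined string of pip dependencies, matching the `if [ "$#" -ne 2 ]` guard kept as context above. The sketch below only illustrates that calling convention with a stand-in echo command; the example values are hypothetical, and the real CLI goes through run_with_pty rather than subprocess.

import subprocess

# Hypothetical example values; the real CLI derives these from the selected
# distribution and the --conda-env argument.
conda_env = "llama"
deps = ["fastapi", "fire", "httpx"]

# The dependencies travel as ONE argv element; the script later relies on word
# splitting of $pip_dependencies when it runs `pip install $pip_dependencies`.
args = [conda_env, " ".join(deps)]

# Stand-in for the real install script: echo the two positional parameters that
# the script's argument-count guard expects.
result = subprocess.run(["bash", "-c", 'echo "env=$1 deps=$2"', "--", *args])
print("exit code:", result.returncode)
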
@@ -29,7 +29,7 @@ COMMON_DEPENDENCIES = [
     "httpx",
     "huggingface-hub",
     "json-strong-typing",
-    "git+ssh://git@github.com/meta-llama/llama-models.git",
+    "llama-models",
     "omegaconf",
     "pandas",
     "Pillow",
@@ -1,30 +1,16 @@
-accelerate
 black==24.4.2
-blobfile
-codeshield
-fairscale
 fastapi
 fire
 flake8
 httpx
 huggingface-hub
 json-strong-typing
-llama-models
-matplotlib
-ollama
 omegaconf
-pandas
-Pillow
 pre-commit
 pydantic==1.10.13
 pydantic_core==2.18.2
-python-dotenv
 python-openapi
 requests
-tiktoken
-torch
-transformers
 ufmt==2.7.0
 usort==1.0.8
 uvicorn
-zmq
|
|