From f7e053e3bae77e0de250c12724c68c380f3ba681 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Tue, 23 Jul 2024 13:25:40 -0700
Subject: [PATCH] Updates to setup and requirements for PyPI

---
 README.md        | 22 ++++++++++++++--------
 requirements.txt |  5 ++---
 setup.py         |  2 +-
 3 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index 448c5a225..cf5031263 100644
--- a/README.md
+++ b/README.md
@@ -1,23 +1,29 @@
-# llama-toolchain 
+# llama-toolchain
 
 This repo contains the API specifications for various components of the Llama Stack as well implementations for some of those APIs like model inference.
 
-The Stack consists of toolchain-apis and agentic-apis. This repo contains the toolchain-apis 
+The Stack consists of toolchain-apis and agentic-apis. This repo contains the toolchain-apis
+
+## Installation
+
+You can install this repository as a [package](https://pypi.org/project/llama-toolchain/) by just doing `pip install llama-toolchain`
+
+If you want to install from source:
 
-## Installation and Setup ##
 ```bash
 mkdir -p ~/local
 cd ~/local
 git clone git@github.com:meta-llama/llama-toolchain.git
 
-conda create -n toolchain python=3.10 
+conda create -n toolchain python=3.10
 conda activate toolchain
 
 cd llama-toolchain
 pip install -e .
 ```
 
-## Test with cli 
-We have built a llama cli to make it easy to configure / run parts of the toolchain 
+## Test with cli
+
+We have built a llama cli to make it easy to configure / run parts of the toolchain
 ```
 llama --help
@@ -31,13 +37,13 @@ options:
 subcommands:
   {download,inference,model,agentic_system}
 ```
 
-There are several subcommands to help get you started 
+There are several subcommands to help get you started
 
 ## Start inference server that can run the llama models
 ```bash
 llama inference configure
 llama inference start
-``` 
+```
 
 ## Test client
diff --git a/requirements.txt b/requirements.txt
index 2c2fe6787..6387b1c31 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,11 +6,12 @@ fairscale
 fastapi
 fire
 flake8
-huggingface-hub
 httpx
+huggingface-hub
 hydra-core
 hydra-zen
 json-strong-typing
+llama_models
 matplotlib
 omegaconf
 pandas
@@ -28,5 +29,3 @@ ufmt==2.7.0
 usort==1.0.8
 uvicorn
 zmq
-
-llama_models[llama3_1] @ git+ssh://git@github.com/meta-llama/llama-models.git
diff --git a/setup.py b/setup.py
index 9191d1468..3fd593ce2 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@ def read_requirements():
 setup(
     name="llama_toolchain",
-    version="0.0.0.1",
+    version="0.0.1",
     author="Meta Llama",
     author_email="llama-oss@meta.com",
     description="Llama toolchain",