update dependencies and rely on LLAMA_TOOLCHAIN_DIR for dev purposes

Hardik Shah 2024-08-08 08:22:13 -07:00
parent 72b20b6f5a
commit ab856c174c
5 changed files with 11 additions and 8 deletions

@@ -16,6 +16,7 @@ def available_agentic_system_providers() -> List[ProviderSpec]:
             provider_id="meta-reference",
             pip_packages=[
                 "codeshield",
+                "pillow",
                 "torch",
                 "transformers",
             ],

@@ -54,10 +54,14 @@ ensure_conda_env_python310() {
   conda deactivate && conda activate "${env_name}"
 
   # Re-installing llama-toolchain in the new conda environment
-  if git rev-parse --is-inside-work-tree &>/dev/null; then
-    repo_root=$(git rev-parse --show-toplevel)
-    cd "$repo_root"
-    pip install -e .
+  if [ -n "$LLAMA_TOOLCHAIN_DIR" ]; then
+    if [ ! -d "$LLAMA_TOOLCHAIN_DIR" ]; then
+      echo -e "${RED}Warning: LLAMA_TOOLCHAIN_DIR is set but directory does not exist: $LLAMA_TOOLCHAIN_DIR${NC}" >&2
+      exit 1
+    fi
+
+    echo "Installing from LLAMA_TOOLCHAIN_DIR: $LLAMA_TOOLCHAIN_DIR"
+    pip install -e "$LLAMA_TOOLCHAIN_DIR"
   else
     pip install llama-toolchain
   fi
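
With this change, the conda environment setup script installs llama-toolchain from a local checkout whenever LLAMA_TOOLCHAIN_DIR is set, and falls back to the PyPI package otherwise. A minimal usage sketch for development (the checkout path and script name below are placeholders, not part of this commit):

# Point the build at a local clone of llama-toolchain (placeholder path)
export LLAMA_TOOLCHAIN_DIR=~/src/llama-toolchain

# Run the conda environment setup as usual (placeholder script name); with the
# variable set it runs `pip install -e "$LLAMA_TOOLCHAIN_DIR"` instead of
# `pip install llama-toolchain`
./setup_env.sh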

@@ -66,10 +66,7 @@ class OllamaInference(Inference):
         try:
             await self.client.ps()
         except httpx.ConnectError:
-            print(
-                "Ollama Server is not running, start it using `ollama serve` in a separate terminal"
-            )
-            raise
+            raise RuntimeError("Ollama Server is not running, start it using `ollama serve` in a separate terminal")
 
     async def shutdown(self) -> None:
         pass
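
The Ollama provider now fails fast with a RuntimeError instead of printing a message and re-raising the httpx.ConnectError when the server is unreachable. The remedy is unchanged; a small sketch of what a user would run first, assuming a default local Ollama install (the port is Ollama's usual default, not something this commit configures):

# In a separate terminal, start the Ollama server
ollama serve

# Optional sanity check that the server is reachable on its default address
curl http://localhost:11434/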

@@ -15,6 +15,7 @@ def available_safety_providers() -> List[ProviderSpec]:
             api=Api.safety,
             provider_id="meta-reference",
             pip_packages=[
+                "accelerate",
                 "codeshield",
                 "torch",
                 "transformers",