Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 15:23:51 +00:00)
update dependencies and rely on LLAMA_TOOLCHAIN_DIR for dev purposes
parent 72b20b6f5a
commit ab856c174c
5 changed files with 11 additions and 8 deletions
llama_toolchain/agentic_system/__init__.py (normal file, 0 changes)
@@ -16,6 +16,7 @@ def available_agentic_system_providers() -> List[ProviderSpec]:
             provider_id="meta-reference",
             pip_packages=[
                 "codeshield",
+                "pillow",
                 "torch",
                 "transformers",
             ],
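For readers skimming the diff: the hunk above edits a declarative provider registry, where each entry names the API it implements and the pip packages its implementation needs. The snippet below is a minimal, self-contained sketch of that shape; the Api enum and ProviderSpec dataclass here are stand-ins for the real llama_toolchain types (whose definitions are not part of this diff), reduced to only the fields visible above.

from dataclasses import dataclass, field
from enum import Enum
from typing import List


class Api(Enum):
    # Illustrative subset; the real Api enum in llama_toolchain has more members.
    agentic_system = "agentic_system"
    safety = "safety"


@dataclass
class ProviderSpec:
    # Stand-in limited to the fields visible in the diff above.
    api: Api
    provider_id: str
    pip_packages: List[str] = field(default_factory=list)


def available_agentic_system_providers() -> List[ProviderSpec]:
    return [
        ProviderSpec(
            api=Api.agentic_system,
            provider_id="meta-reference",
            pip_packages=[
                "codeshield",
                "pillow",  # dependency added by this commit
                "torch",
                "transformers",
            ],
        )
    ]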
@@ -54,10 +54,14 @@ ensure_conda_env_python310() {
   conda deactivate && conda activate "${env_name}"
 
   # Re-installing llama-toolchain in the new conda environment
-  if git rev-parse --is-inside-work-tree &>/dev/null; then
-    repo_root=$(git rev-parse --show-toplevel)
-    cd "$repo_root"
-    pip install -e .
+  if [ -n "$LLAMA_TOOLCHAIN_DIR" ]; then
+    if [ ! -d "$LLAMA_TOOLCHAIN_DIR" ]; then
+      echo -e "${RED}Warning: LLAMA_TOOLCHAIN_DIR is set but directory does not exist: $LLAMA_TOOLCHAIN_DIR${NC}" >&2
+      exit 1
+    fi
+
+    echo "Installing from LLAMA_TOOLCHAIN_DIR: $LLAMA_TOOLCHAIN_DIR"
+    pip install -e "$LLAMA_TOOLCHAIN_DIR"
   else
     pip install llama-toolchain
   fi
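The hunk above replaces the old heuristic (editable-install whenever the script happens to run inside a git work tree) with an explicit opt-in: set LLAMA_TOOLCHAIN_DIR to a local checkout to get an editable install, otherwise the published llama-toolchain package is installed. The Python sketch below only restates that branch to spell out the control flow; the real logic is the bash function above, and nothing in the commit implies running it through Python.

import os
import subprocess
import sys


def install_llama_toolchain() -> None:
    toolchain_dir = os.environ.get("LLAMA_TOOLCHAIN_DIR")
    if toolchain_dir:
        if not os.path.isdir(toolchain_dir):
            # Mirrors the script's warning-and-exit path.
            print(
                f"Warning: LLAMA_TOOLCHAIN_DIR is set but directory does not exist: {toolchain_dir}",
                file=sys.stderr,
            )
            sys.exit(1)
        print(f"Installing from LLAMA_TOOLCHAIN_DIR: {toolchain_dir}")
        subprocess.run(
            [sys.executable, "-m", "pip", "install", "-e", toolchain_dir], check=True
        )
    else:
        # No override set: install the released package from PyPI.
        subprocess.run(
            [sys.executable, "-m", "pip", "install", "llama-toolchain"], check=True
        )

In practice a developer working against a local checkout would export something like LLAMA_TOOLCHAIN_DIR=/path/to/llama-toolchain before running the environment setup script; the exact invocation is outside this hunk.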
@@ -66,10 +66,7 @@ class OllamaInference(Inference):
         try:
             await self.client.ps()
         except httpx.ConnectError:
-            print(
-                "Ollama Server is not running, start it using `ollama serve` in a separate terminal"
-            )
-            raise
+            raise RuntimeError("Ollama Server is not running, start it using `ollama serve` in a separate terminal")
 
     async def shutdown(self) -> None:
         pass
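The change above folds the print-then-bare-raise into a single RuntimeError, so the remediation hint travels with the exception instead of going to stdout. Below is a standalone sketch of the same startup health-check pattern; it probes the server over HTTP directly rather than through the Ollama client object used in the class, and the default base URL (Ollama's usual port 11434) is an assumption.

import asyncio

import httpx


async def check_ollama(base_url: str = "http://localhost:11434") -> None:
    # Probe the server once; any connection failure becomes one actionable error.
    try:
        async with httpx.AsyncClient() as client:
            await client.get(base_url)
    except httpx.ConnectError:
        raise RuntimeError(
            "Ollama Server is not running, start it using `ollama serve` in a separate terminal"
        )


if __name__ == "__main__":
    asyncio.run(check_ollama())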
@@ -15,6 +15,7 @@ def available_safety_providers() -> List[ProviderSpec]:
             api=Api.safety,
             provider_id="meta-reference",
             pip_packages=[
+                "accelerate",
                 "codeshield",
                 "torch",
                 "transformers",
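Both provider specs touched by this commit grow their pip_packages lists ("pillow" and "accelerate"). As a convenience, and not part of the commit, here is a small sketch for checking that declared dependencies are importable in the active environment; the pip-name-to-import-name mapping is an assumption, since distribution names (e.g. pillow) do not always match module names (PIL).

import importlib
from typing import List

# Assumed mapping for packages whose import name differs from the pip name.
IMPORT_NAMES = {"pillow": "PIL"}


def missing_packages(pip_packages: List[str]) -> List[str]:
    missing = []
    for pkg in pip_packages:
        module = IMPORT_NAMES.get(pkg, pkg)
        try:
            importlib.import_module(module)
        except ImportError:
            missing.append(pkg)
    return missing


if __name__ == "__main__":
    print(missing_packages(["accelerate", "codeshield", "torch", "transformers"]))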