chore: update docs to use `llama stack list-deps` instead of `llama stack build`

# What does this PR do?

Replaces the `llama stack build --distro <name> --image-type venv` setup instructions in the example notebooks with the dependency-based workflow: install `llama-stack` with `uv pip install`, then install each distribution's dependencies via `llama stack list-deps <name> | xargs -L1 uv pip install`.
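
Roughly, each notebook's setup goes from the old one-shot build command to an explicit install-then-run flow (a sketch, shown with the `starter` distribution; the other notebooks use `fireworks`, `nvidia`, or `experimental-post-training`):

```bash
# Before: build (and run) a distribution image in one step
llama stack build --distro starter --image-type venv --run

# After: install llama-stack, then install the distribution's dependencies explicitly
uv pip install llama-stack
llama stack list-deps starter | xargs -L1 uv pip install

# Start the server for the installed distribution
llama stack run starter
```
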
## Test Plan
Eric Huang 2025-10-19 21:13:40 -07:00
parent a9ff8c5c9f
commit e0dda3bb06
23 changed files with 6409 additions and 6390 deletions


@@ -2864,7 +2864,8 @@
}
],
"source": [
"!llama stack build --distro experimental-post-training --image-type venv --image-name __system__"
"!uv pip install llama-stack\n",
"llama stack list-deps experimental-post-training | xargs -L1 uv pip install --image-name __system__\n"
]
},
{
@@ -6365,4 +6366,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}


@@ -38,7 +38,8 @@
"source": [
"# NBVAL_SKIP\n",
"!pip install -U llama-stack\n",
"!UV_SYSTEM_PYTHON=1 llama stack build --distro fireworks --image-type venv"
"!UV_SYSTEM_PYTHON=1 uv pip install llama-stack\n",
"llama stack list-deps fireworks | xargs -L1 uv pip install\n"
]
},
{
@@ -3531,4 +3532,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

File diff suppressed because it is too large


@@ -136,7 +136,9 @@
" \"\"\"Build and run LlamaStack server in one step using --run flag\"\"\"\n",
" log_file = open(\"llama_stack_server.log\", \"w\")\n",
" process = subprocess.Popen(\n",
" \"uv run --with llama-stack llama stack build --distro starter --image-type venv --run\",\n",
" \"uv pip install llama-stack\n",
"llama stack list-deps starter | xargs -L1 uv pip install\n",
"llama stack run starter --image-type venv --run\",\n",
" shell=True,\n",
" stdout=log_file,\n",
" stderr=log_file,\n",
@@ -172,7 +174,7 @@
"\n",
"def kill_llama_stack_server():\n",
" # Kill any existing llama stack server processes using pkill command\n",
" os.system(\"pkill -f llama_stack.core.server.server\")"
" os.system(\"pkill -f llama_stack.core.server.server\")\n"
]
},
{
@@ -1261,4 +1263,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}


@@ -105,7 +105,9 @@
" \"\"\"Build and run LlamaStack server in one step using --run flag\"\"\"\n",
" log_file = open(\"llama_stack_server.log\", \"w\")\n",
" process = subprocess.Popen(\n",
" \"uv run --with llama-stack llama stack build --distro starter --image-type venv --run\",\n",
" \"uv pip install llama-stack\n",
"llama stack list-deps starter | xargs -L1 uv pip install\n",
"llama stack run starter --image-type venv --run\",\n",
" shell=True,\n",
" stdout=log_file,\n",
" stderr=log_file,\n",
@@ -141,7 +143,7 @@
"\n",
"def kill_llama_stack_server():\n",
" # Kill any existing llama stack server processes using pkill command\n",
" os.system(\"pkill -f llama_stack.core.server.server\")"
" os.system(\"pkill -f llama_stack.core.server.server\")\n"
]
},
{
@@ -698,4 +700,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}


@@ -91,9 +91,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"```bash\n",
"LLAMA_STACK_DIR=$(pwd) llama stack build --distro nvidia --image-type venv\n",
"```"
"```bash\nuv pip install llama-stack\nllama stack list-deps nvidia | xargs -L1 uv pip install\n```\n"
]
},
{
@@ -1682,4 +1680,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}


@@ -80,9 +80,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"```bash\n",
"LLAMA_STACK_DIR=$(pwd) llama stack build --distro nvidia --image-type venv\n",
"```"
"```bash\nuv pip install llama-stack\nllama stack list-deps nvidia | xargs -L1 uv pip install\n```\n"
]
},
{
@@ -592,4 +590,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}