chore: update doc

# What does this PR do?
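
Updates the example notebooks to install distribution dependencies with `llama stack list-deps <distro> | xargs -L1 uv pip install` instead of `llama stack build`, and removes the leftover `llama stack build` snippets from the NVIDIA notebooks.

A minimal sketch of the flow the updated notebooks describe, using the `starter` distribution shown in the diff (any flags beyond these two commands are not taken from this PR):

```bash
# Install the Python dependencies a distribution needs into the current environment
llama stack list-deps starter | xargs -L1 uv pip install

# Then start the server for that distribution
llama stack run starter
```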


## Test Plan

Author: Eric Huang
Date:   2025-10-19 21:47:44 -07:00
Commit: bd2411354c
Parent: a9ff8c5c9f

25 changed files with 6382 additions and 6388 deletions

@@ -2864,7 +2864,7 @@
 }
 ],
 "source": [
-"!llama stack build --distro experimental-post-training --image-type venv --image-name __system__"
+"!llama stack list-deps experimental-post-training | xargs -L1 uv pip install --system\n"
 ]
 },
 {

@@ -38,7 +38,7 @@
 "source": [
 "# NBVAL_SKIP\n",
 "!pip install -U llama-stack\n",
-"!UV_SYSTEM_PYTHON=1 llama stack build --distro fireworks --image-type venv"
+"!llama stack list-deps fireworks | xargs -L1 uv pip install\n"
 ]
 },
 {

File diff suppressed because it is too large

@@ -136,7 +136,8 @@
 "    \"\"\"Build and run LlamaStack server in one step using --run flag\"\"\"\n",
 "    log_file = open(\"llama_stack_server.log\", \"w\")\n",
 "    process = subprocess.Popen(\n",
-"        \"uv run --with llama-stack llama stack build --distro starter --image-type venv --run\",\n",
+"        \"llama stack list-deps starter | xargs -L1 uv pip install && \"\n",
+"        \"llama stack run starter --image-type venv\",\n",
 "        shell=True,\n",
 "        stdout=log_file,\n",
 "        stderr=log_file,\n",
@@ -172,7 +173,7 @@
 "\n",
 "def kill_llama_stack_server():\n",
 "    # Kill any existing llama stack server processes using pkill command\n",
-"    os.system(\"pkill -f llama_stack.core.server.server\")"
+"    os.system(\"pkill -f llama_stack.core.server.server\")\n"
 ]
 },
 {

@@ -105,7 +105,8 @@
 "    \"\"\"Build and run LlamaStack server in one step using --run flag\"\"\"\n",
 "    log_file = open(\"llama_stack_server.log\", \"w\")\n",
 "    process = subprocess.Popen(\n",
-"        \"uv run --with llama-stack llama stack build --distro starter --image-type venv --run\",\n",
+"        \"llama stack list-deps starter | xargs -L1 uv pip install && \"\n",
+"        \"llama stack run starter --image-type venv\",\n",
 "        shell=True,\n",
 "        stdout=log_file,\n",
 "        stderr=log_file,\n",
@@ -141,7 +142,7 @@
 "\n",
 "def kill_llama_stack_server():\n",
 "    # Kill any existing llama stack server processes using pkill command\n",
-"    os.system(\"pkill -f llama_stack.core.server.server\")"
+"    os.system(\"pkill -f llama_stack.core.server.server\")\n"
 ]
 },
 {

@@ -90,11 +90,7 @@
 {
 "cell_type": "markdown",
 "metadata": {},
-"source": [
-"```bash\n",
-"LLAMA_STACK_DIR=$(pwd) llama stack build --distro nvidia --image-type venv\n",
-"```"
-]
+"source": []
 },
 {
 "cell_type": "markdown",

@@ -79,11 +79,7 @@
 {
 "cell_type": "markdown",
 "metadata": {},
-"source": [
-"```bash\n",
-"LLAMA_STACK_DIR=$(pwd) llama stack build --distro nvidia --image-type venv\n",
-"```"
-]
+"source": []
 },
 {
 "cell_type": "markdown",