diff --git a/docs/getting_started.ipynb b/docs/getting_started.ipynb
index ee616b471..51ae945f4 100644
--- a/docs/getting_started.ipynb
+++ b/docs/getting_started.ipynb
@@ -3419,22 +3419,22 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
-   "id": "865fc5a8",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "!pip install llama-stack-client==0.1.0"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "id": "44e05e16",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current\n",
+      "                                 Dload  Upload   Total   Spent    Left  Speed\n",
+      "100  275k  100  275k    0     0   780k      0 --:--:-- --:--:-- --:--:--  780k\n"
+     ]
+    }
+   ],
    "source": [
-    "!wget https://raw.githubusercontent.com/meta-llama/llama-models/refs/heads/main/Llama_Repo.jpeg"
+    "!curl -O https://raw.githubusercontent.com/meta-llama/llama-models/refs/heads/main/Llama_Repo.jpeg"
    ]
   },
   {
@@ -3444,6 +3444,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "# NBVAL_SKIP\n",
     "from PIL import Image\n",
     "import matplotlib.pyplot as plt\n",
     "\n",
@@ -3580,6 +3581,7 @@
     "        model=LLAMA32_11B_INSTRUCT,\n",
     "        instructions=\"You are a helpful assistant\",\n",
     "        enable_session_persistence=False,\n",
+    "        toolgroups=[],\n",
     "    )\n",
     "\n",
     "    agent = Agent(client, agent_config)\n",
@@ -3630,7 +3632,7 @@
    "provenance": []
   },
   "kernelspec": {
-   "display_name": "toolchain",
+   "display_name": "master",
    "language": "python",
    "name": "python3"
   },
@@ -3644,7 +3646,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.15"
+   "version": "3.10.16"
   },
   "widgets": {
    "application/vnd.jupyter.widget-state+json": {