diff --git a/docs/getting_started.ipynb b/docs/getting_started.ipynb
index 5de401b5c..e361be277 100644
--- a/docs/getting_started.ipynb
+++ b/docs/getting_started.ipynb
@@ -123,11 +123,6 @@
 "outputs": [],
 "source": [
 "# NBVAL_SKIP\n",
- "\n",
- "# Need to install these together beforehand else it an lead to incompatible versions between these packages\n",
- "!pip uninstall pandas numpy -y\n",
- "!pip install pandas numpy\n",
- "\n",
 "# This will build all the dependencies you will need\n",
 "!UV_SYSTEM_PYTHON=1 llama stack build --template together --image-type venv"
 ]
@@ -1641,7 +1636,7 @@
 "from termcolor import cprint\n",
 "\n",
 "agent = Agent(\n",
- " client,\n",
+ " client, \n",
 " model=model_id,\n",
 " instructions=\"You are a helpful assistant. Use websearch tool to help answer questions.\",\n",
 " tools=[\"builtin::websearch\"],\n",
@@ -1838,7 +1833,7 @@
 " chunk_size_in_tokens=512,\n",
 ")\n",
 "rag_agent = Agent(\n",
- " client,\n",
+ " client, \n",
 " model=model_id,\n",
 " instructions=\"You are a helpful assistant\",\n",
 " tools = [\n",
@@ -1974,7 +1969,7 @@
 "from llama_stack_client import Document\n",
 "\n",
 "codex_agent = Agent(\n",
- " client,\n",
+ " client, \n",
 " model=\"meta-llama/Llama-3.1-8B-Instruct\",\n",
 " instructions=\"You are a helpful assistant\",\n",
 " tools=[\n",
@@ -2485,6 +2480,7 @@
 },
 {
 "data": {
+ "application/javascript": "\n (async () => {\n const url = new URL(await google.colab.kernel.proxyPort(10000, {'cache': true}));\n const iframe = document.createElement('iframe');\n iframe.src = url;\n iframe.setAttribute('width', '100%');\n iframe.setAttribute('height', '800');\n iframe.setAttribute('frameborder', 0);\n document.body.appendChild(iframe);\n })();\n ",
 "text/plain": [
 ""
 ]
@@ -2896,7 +2892,7 @@
 "from termcolor import cprint\n",
 "\n",
 "agent = Agent(\n",
- " client,\n",
+ " client, \n",
 " model=model_id,\n",
 " instructions=\"You are a helpful assistant\",\n",
 " tools=[\"mcp::filesystem\"],\n",
@@ -2996,7 +2992,7 @@
 "from llama_stack_client import Agent, AgentEventLogger\n",
 "\n",
 "agent = Agent(\n",
- " client,\n",
+ " client, \n",
 " model=\"meta-llama/Llama-3.3-70B-Instruct\",\n",
 " instructions=\"You are a helpful assistant. Use search tool to answer the questions. \",\n",
 " tools=[\"builtin::websearch\"],\n",
@@ -4327,7 +4323,7 @@
 ],
 "source": [
 "agent = Agent(\n",
- " client,\n",
+ " client, \n",
 " model=vision_model_id,\n",
 " instructions=\"You are a helpful assistant\",\n",
 ")\n",
@@ -4355,7 +4351,8 @@
 ")\n",
 "\n",
 "for log in EventLogger().log(response):\n",
- " log.print()\n"
+ " log.print()\n",
+ " "
 ]
 },
 {
@@ -4373,9 +4370,6 @@
 "gpuType": "T4",
 "provenance": []
 },
- "fileHeader": "",
- "fileUid": "e07d15da-69ef-456e-b4d6-f15fde511281",
- "isAdHoc": false,
 "kernelspec": {
 "display_name": "master",
 "language": "python",
@@ -9869,5 +9863,7 @@
 }
 }
 }
- }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
 }