Install pandas and numpy beforehand to avoid version mismatch

Hardik Shah 2025-03-20 16:53:32 -07:00
parent 581e8ae562
commit 6e0bc5b078


@@ -123,6 +123,11 @@
"outputs": [],
"source": [
"# NBVAL_SKIP\n",
"\n",
"# Need to install these together beforehand else it an lead to incompatible versions between these packages\n",
"!pip uninstall pandas numpy -y\n",
"!pip install pandas numpy\n",
"\n",
"# This will build all the dependencies you will need\n",
"!UV_SYSTEM_PYTHON=1 llama stack build --template together --image-type venv"
]
@@ -1636,7 +1641,7 @@
"from termcolor import cprint\n",
"\n",
"agent = Agent(\n",
" client, \n",
" client,\n",
" model=model_id,\n",
" instructions=\"You are a helpful assistant. Use websearch tool to help answer questions.\",\n",
" tools=[\"builtin::websearch\"],\n",
@@ -1833,7 +1838,7 @@
" chunk_size_in_tokens=512,\n",
")\n",
"rag_agent = Agent(\n",
" client, \n",
" client,\n",
" model=model_id,\n",
" instructions=\"You are a helpful assistant\",\n",
" tools = [\n",
@@ -1969,7 +1974,7 @@
"from llama_stack_client import Document\n",
"\n",
"codex_agent = Agent(\n",
" client, \n",
" client,\n",
" model=\"meta-llama/Llama-3.1-8B-Instruct\",\n",
" instructions=\"You are a helpful assistant\",\n",
" tools=[\n",
@@ -2480,7 +2485,6 @@
},
{
"data": {
"application/javascript": "\n (async () => {\n const url = new URL(await google.colab.kernel.proxyPort(10000, {'cache': true}));\n const iframe = document.createElement('iframe');\n iframe.src = url;\n iframe.setAttribute('width', '100%');\n iframe.setAttribute('height', '800');\n iframe.setAttribute('frameborder', 0);\n document.body.appendChild(iframe);\n })();\n ",
"text/plain": [
"<IPython.core.display.Javascript object>"
]
@@ -2892,7 +2896,7 @@
"from termcolor import cprint\n",
"\n",
"agent = Agent(\n",
" client, \n",
" client,\n",
" model=model_id,\n",
" instructions=\"You are a helpful assistant\",\n",
" tools=[\"mcp::filesystem\"],\n",
@@ -2992,7 +2996,7 @@
"from llama_stack_client import Agent, AgentEventLogger\n",
"\n",
"agent = Agent(\n",
" client, \n",
" client,\n",
" model=\"meta-llama/Llama-3.3-70B-Instruct\",\n",
" instructions=\"You are a helpful assistant. Use search tool to answer the questions. \",\n",
" tools=[\"builtin::websearch\"],\n",
@@ -4323,7 +4327,7 @@
],
"source": [
"agent = Agent(\n",
" client, \n",
" client,\n",
" model=vision_model_id,\n",
" instructions=\"You are a helpful assistant\",\n",
")\n",
@@ -4351,8 +4355,7 @@
")\n",
"\n",
"for log in EventLogger().log(response):\n",
" log.print()\n",
" "
" log.print()\n"
]
},
{
@@ -4370,6 +4373,9 @@
"gpuType": "T4",
"provenance": []
},
"fileHeader": "",
"fileUid": "e07d15da-69ef-456e-b4d6-f15fde511281",
"isAdHoc": false,
"kernelspec": {
"display_name": "master",
"language": "python",
@@ -9863,7 +9869,5 @@
}
}
}
},
"nbformat": 4,
"nbformat_minor": 5
}
}