forked from phoenix/litellm-mirror

(docs) add parallel function calling cookbook

This commit is contained in:
parent 32f8551357
commit 16569c9bca

1 changed file with 261 additions and 0 deletions
cookbook/Parallel_function_calling.ipynb | 261 (vendored, new file)
@@ -0,0 +1,261 @@
{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "cells": [
    {
      "cell_type": "markdown",
      "source": [
        "This is a tutorial on using Parallel function calling with LiteLLM"
      ],
      "metadata": {
        "id": "gHwFJ-srdnku"
      }
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "RrtHuVHlZmUe"
      },
      "outputs": [],
      "source": [
        "!pip install litellm"
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "# set openai api key\n",
        "import os\n",
        "os.environ['OPENAI_API_KEY'] = \"\"  # litellm reads OPENAI_API_KEY from the environment and uses it for each request"
      ],
      "metadata": {
        "id": "l4GQ-M5yZ5UW"
      },
      "execution_count": 3,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Step 1: send the conversation and available functions to the model"
      ],
      "metadata": {
        "id": "AxgR2fCgaRoW"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "import litellm\n",
        "import json\n",
        "# Example dummy function hard coded to return the same weather\n",
        "# In production, this could be your backend API or an external API\n",
        "def get_current_weather(location, unit=\"fahrenheit\"):\n",
        "    \"\"\"Get the current weather in a given location\"\"\"\n",
        "    if \"tokyo\" in location.lower():\n",
        "        return json.dumps({\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": \"celsius\"})\n",
        "    elif \"san francisco\" in location.lower():\n",
        "        return json.dumps({\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": \"fahrenheit\"})\n",
        "    elif \"paris\" in location.lower():\n",
        "        return json.dumps({\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": \"celsius\"})\n",
        "    else:\n",
        "        return json.dumps({\"location\": location, \"temperature\": \"unknown\"})\n",
        "\n",
        "messages = [{\"role\": \"user\", \"content\": \"What's the weather like in San Francisco, Tokyo, and Paris?\"}]\n",
        "tools = [\n",
        "    {\n",
        "        \"type\": \"function\",\n",
        "        \"function\": {\n",
        "            \"name\": \"get_current_weather\",\n",
        "            \"description\": \"Get the current weather in a given location\",\n",
        "            \"parameters\": {\n",
        "                \"type\": \"object\",\n",
        "                \"properties\": {\n",
        "                    \"location\": {\n",
        "                        \"type\": \"string\",\n",
        "                        \"description\": \"The city and state, e.g. San Francisco, CA\",\n",
        "                    },\n",
        "                    \"unit\": {\"type\": \"string\", \"enum\": [\"celsius\", \"fahrenheit\"]},\n",
        "                },\n",
        "                \"required\": [\"location\"],\n",
        "            },\n",
        "        },\n",
        "    }\n",
        "]\n",
        "\n",
        "response = litellm.completion(\n",
        "    model=\"gpt-3.5-turbo-1106\",\n",
        "    messages=messages,\n",
        "    tools=tools,\n",
        "    tool_choice=\"auto\",  # auto is default, but we'll be explicit\n",
        ")\n",
        "print(\"\\nLLM Response1:\\n\", response)\n",
        "response_message = response.choices[0].message\n",
        "tool_calls = response.choices[0].message.tool_calls\n",
        "print(\"\\nTool Choice:\\n\", tool_calls)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Y3qteFo8ZrZP",
        "outputId": "ee6c1183-55c1-4111-cdc0-967b8fed9db3"
      },
      "execution_count": 18,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            "LLM Response1:\n",
            " ModelResponse(id='chatcmpl-8MNdPbrhtnwiPK1x3PEoGwrH144TW', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(id='call_K2Giwoq3NloGPfSv25MJVFZG', function=Function(arguments='{\"location\": \"San Francisco\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function'), ChatCompletionMessageToolCall(id='call_6K8bYCZK6qsbMY3n51FzE5Nz', function=Function(arguments='{\"location\": \"Tokyo\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function'), ChatCompletionMessageToolCall(id='call_cKSmUEJGufDwS7TaUHWzp7qx', function=Function(arguments='{\"location\": \"Paris\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function')]))], created=1700344759, model='gpt-3.5-turbo-1106', object='chat.completion', system_fingerprint='fp_eeff13170a', usage={'completion_tokens': 77, 'prompt_tokens': 88, 'total_tokens': 165}, _response_ms=1049.913)\n",
            "\n",
            "Tool Choice:\n",
            " [ChatCompletionMessageToolCall(id='call_K2Giwoq3NloGPfSv25MJVFZG', function=Function(arguments='{\"location\": \"San Francisco\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function'), ChatCompletionMessageToolCall(id='call_6K8bYCZK6qsbMY3n51FzE5Nz', function=Function(arguments='{\"location\": \"Tokyo\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function'), ChatCompletionMessageToolCall(id='call_cKSmUEJGufDwS7TaUHWzp7qx', function=Function(arguments='{\"location\": \"Paris\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function')]\n"
          ]
        }
      ]
    },
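    {
      "cell_type": "markdown",
      "source": [
        "Optionally, you can confirm that the model chose to call tools (rather than answering directly) by checking `finish_reason` on the first choice. The next cell is a minimal added check; it only uses fields already shown in the response printed above."
      ],
      "metadata": {}
    },
    {
      "cell_type": "code",
      "source": [
        "# Optional sanity check (sketch): finish_reason is \"tool_calls\" when the model\n",
        "# decided to invoke one or more tools, as seen in the response printed above\n",
        "print(\"finish_reason:\", response.choices[0].finish_reason)\n",
        "if response.choices[0].finish_reason != \"tool_calls\":\n",
        "    # the model answered directly, so there are no tools to execute\n",
        "    print(\"Direct answer:\", response_message.content)\n"
      ],
      "metadata": {},
      "execution_count": null,
      "outputs": []
    },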
    {
      "cell_type": "markdown",
      "source": [
        "## Step 2 - Parse the Model Response and Execute Functions"
      ],
      "metadata": {
        "id": "tD4lJQ40cU44"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# Check if the model wants to call a function\n",
        "if tool_calls:\n",
        "    # Execute the functions and prepare responses\n",
        "    available_functions = {\n",
        "        \"get_current_weather\": get_current_weather,\n",
        "    }\n",
        "\n",
        "    messages.append(response_message)  # Extend conversation with assistant's reply\n",
        "\n",
        "    for tool_call in tool_calls:\n",
        "        print(f\"\\nExecuting tool call\\n{tool_call}\")\n",
        "        function_name = tool_call.function.name\n",
        "        function_to_call = available_functions[function_name]\n",
        "        function_args = json.loads(tool_call.function.arguments)\n",
        "        # calling the get_current_weather() function\n",
        "        function_response = function_to_call(\n",
        "            location=function_args.get(\"location\"),\n",
        "            unit=function_args.get(\"unit\"),\n",
        "        )\n",
        "        print(f\"Result from tool call\\n{function_response}\\n\")\n",
        "\n",
        "        # Extend conversation with function response\n",
        "        messages.append(\n",
        "            {\n",
        "                \"tool_call_id\": tool_call.id,\n",
        "                \"role\": \"tool\",\n",
        "                \"name\": function_name,\n",
        "                \"content\": function_response,\n",
        "            }\n",
        "        )\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "af4oXQvicV_n",
        "outputId": "abf6ac3e-4a21-4a4f-b8d7-809b763d0632"
      },
      "execution_count": 21,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            "Executing tool call\n",
            "ChatCompletionMessageToolCall(id='call_K2Giwoq3NloGPfSv25MJVFZG', function=Function(arguments='{\"location\": \"San Francisco\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function')\n",
            "Result from tool call\n",
            "{\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": \"fahrenheit\"}\n",
            "\n",
            "\n",
            "Executing tool call\n",
            "ChatCompletionMessageToolCall(id='call_6K8bYCZK6qsbMY3n51FzE5Nz', function=Function(arguments='{\"location\": \"Tokyo\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function')\n",
            "Result from tool call\n",
            "{\"location\": \"Tokyo\", \"temperature\": \"10\", \"unit\": \"celsius\"}\n",
            "\n",
            "\n",
            "Executing tool call\n",
            "ChatCompletionMessageToolCall(id='call_cKSmUEJGufDwS7TaUHWzp7qx', function=Function(arguments='{\"location\": \"Paris\", \"unit\": \"celsius\"}', name='get_current_weather'), type='function')\n",
            "Result from tool call\n",
            "{\"location\": \"Paris\", \"temperature\": \"22\", \"unit\": \"celsius\"}\n",
            "\n"
          ]
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Step 3 - Second litellm.completion() call"
      ],
      "metadata": {
        "id": "E3OL1fqUdFdv"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "second_response = litellm.completion(\n",
        "    model=\"gpt-3.5-turbo-1106\",\n",
        "    messages=messages,\n",
        ")\n",
        "print(\"Second Response\\n\", second_response)\n",
        "print(\"Second Response Message\\n\", second_response.choices[0].message.content)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "8KYB2n-jc1_f",
        "outputId": "6c6448ae-1c09-43ae-eb90-208b118e6179"
      },
      "execution_count": 26,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Second Response\n",
            " ModelResponse(id='chatcmpl-8MNhat166ZqjO6egXcUh85Pd0s7KV', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"The current weather in San Francisco is 72°F, in Tokyo it's 10°C, and in Paris it's 22°C.\", role='assistant'))], created=1700345018, model='gpt-3.5-turbo-1106', object='chat.completion', system_fingerprint='fp_eeff13170a', usage={'completion_tokens': 28, 'prompt_tokens': 465, 'total_tokens': 493}, _response_ms=999.246)\n",
            "Second Response Message\n",
            " The current weather in San Francisco is 72°F, in Tokyo it's 10°C, and in Paris it's 22°C.\n"
          ]
        }
      ]
    },
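    {
      "cell_type": "markdown",
      "source": [
        "As an optional wrap-up, the cell below is a minimal sketch that wraps Steps 1-3 into one helper, reusing the `tools` list and `get_current_weather` defined above. `run_conversation` is a name introduced here for illustration."
      ],
      "metadata": {}
    },
    {
      "cell_type": "code",
      "source": [
        "# Minimal sketch: wrap Steps 1-3 into a single helper.\n",
        "# Assumes `litellm`, `json`, `tools`, and `get_current_weather` from the cells above.\n",
        "def run_conversation(user_prompt, model=\"gpt-3.5-turbo-1106\"):\n",
        "    msgs = [{\"role\": \"user\", \"content\": user_prompt}]\n",
        "    # Step 1: let the model decide which tools to call\n",
        "    first = litellm.completion(model=model, messages=msgs, tools=tools, tool_choice=\"auto\")\n",
        "    calls = first.choices[0].message.tool_calls\n",
        "    if not calls:\n",
        "        return first.choices[0].message.content  # model answered directly\n",
        "    # Step 2: execute each requested tool and append the results\n",
        "    msgs.append(first.choices[0].message)\n",
        "    for call in calls:\n",
        "        args = json.loads(call.function.arguments)\n",
        "        result = get_current_weather(location=args.get(\"location\"), unit=args.get(\"unit\"))\n",
        "        msgs.append({\"tool_call_id\": call.id, \"role\": \"tool\", \"name\": call.function.name, \"content\": result})\n",
        "    # Step 3: send the tool results back for the final natural-language answer\n",
        "    return litellm.completion(model=model, messages=msgs).choices[0].message.content\n",
        "\n",
        "print(run_conversation(\"What's the weather like in San Francisco, Tokyo, and Paris?\"))\n"
      ],
      "metadata": {},
      "execution_count": null,
      "outputs": []
    },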
    {
      "cell_type": "code",
      "source": [],
      "metadata": {
        "id": "lG9mUnModeeE"
      },
      "execution_count": null,
      "outputs": []
    }
  ]
}