{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Defaulting to user installation because normal site-packages is not writeable\n",
"Requirement already satisfied: litellm==0.1.385 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (0.1.385)\n",
"Requirement already satisfied: openai<0.28.0,>=0.27.8 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.385) (0.27.8)\n",
"Requirement already satisfied: python-dotenv<2.0.0,>=1.0.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.385) (1.0.0)\n",
"Requirement already satisfied: tiktoken<0.5.0,>=0.4.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.385) (0.4.0)\n",
"Requirement already satisfied: requests>=2.20 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.385) (2.28.2)\n",
"Requirement already satisfied: tqdm in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.385) (4.65.0)\n",
"Requirement already satisfied: aiohttp in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.385) (3.8.4)\n",
"Requirement already satisfied: regex>=2022.1.18 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from tiktoken<0.5.0,>=0.4.0->litellm==0.1.385) (2023.6.3)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (3.1.0)\n",
"Requirement already satisfied: idna<4,>=2.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (3.4)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.26.6)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (2023.5.7)\n",
"Requirement already satisfied: attrs>=17.3.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (23.1.0)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (6.0.4)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (4.0.2)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.9.2)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.3.3)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.3.1)\n",
"\n",
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.1.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.2.1\u001b[0m\n",
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49m/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip\u001b[0m\n"
]
}
],
"source": [
"!pip install litellm==0.1.385"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"from litellm import completion\n",
"import asyncio"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Setup Messages"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"user_message = \"respond in 20 words. who are you?\"\n",
"messages = [{ \"content\": user_message,\"role\": \"user\"}]"
]
},
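{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"The input uses the standard OpenAI `messages` format, so a multi-turn prompt (e.g., with a system message) can be expressed the same way. The cell below is a minimal, illustrative sketch; `messages_with_system` is a hypothetical variable and the rest of the notebook keeps using `messages`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch of a multi-turn, OpenAI-style input. `messages_with_system` is\n",
"# illustrative only; the cells below continue to use `messages`.\n",
"messages_with_system = [\n",
"    {\"role\": \"system\", \"content\": \"You are a concise assistant.\"},\n",
"    {\"role\": \"user\", \"content\": user_message},\n",
"]"
]
},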
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Call Ollama - llama2 with chatGPT Input/Output using litellm.completion() "
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<async_generator object get_ollama_response_stream at 0x106b11790>\n"
]
}
],
"source": [
"response = completion(model=\"llama2\", messages=messages, custom_api_base=\"http://localhost:11434\", custom_llm_provider=\"ollama\", stream=True)\n",
"print(response)"
]
},
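{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Pointing at a different Ollama server\n",
"\n",
"`custom_api_base` above targets the default local Ollama endpoint (`http://localhost:11434`). As a sketch, the same call should work against any reachable Ollama server by swapping that URL; `ollama_api_base` and `remote_response` below are illustrative names."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: swap the base URL to target a different (reachable) Ollama server.\n",
"ollama_api_base = \"http://localhost:11434\"  # replace with your server's URL\n",
"remote_response = completion(\n",
"    model=\"llama2\",\n",
"    messages=messages,\n",
"    custom_api_base=ollama_api_base,\n",
"    custom_llm_provider=\"ollama\",\n",
"    stream=True\n",
")"
]
},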
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Iterate through the generator - Streaming"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'choices': [{'delta': {'role': 'assistant', 'content': ' I'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': \"'\"}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'm'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' an'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' A'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'I'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' assistant'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' trained'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' to'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' help'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' with'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' tasks'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' and'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' answer'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' questions'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': '.'}}]}\n"
]
}
],
"source": [
"\n",
"async def get_response(generator):\n",
" response = \"\"\n",
" async for elem in generator:\n",
" print(elem)\n",
" response += elem['choices'][0]['delta'][\"content\"]\n",
" return response\n",
"\n",
"string_response = await get_response(response)"
]
},
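{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Print the assembled response\n",
"\n",
"`string_response` holds the concatenated delta contents from the stream above. As a further sketch (assuming later chunks keep the same `{'choices': [{'delta': {'content': ...}}]}` shape shown in the output above), a fresh streaming call can also be printed token-by-token as it arrives; `print_stream` and `fresh_stream` are illustrative names."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(string_response)\n",
"\n",
"# Sketch: make a fresh streaming call and print tokens inline as they arrive.\n",
"async def print_stream(generator):\n",
"    async for chunk in generator:\n",
"        print(chunk['choices'][0]['delta']['content'], end=\"\", flush=True)\n",
"\n",
"fresh_stream = completion(\n",
"    model=\"llama2\",\n",
"    messages=messages,\n",
"    custom_api_base=\"http://localhost:11434\",\n",
"    custom_llm_provider=\"ollama\",\n",
"    stream=True\n",
")\n",
"await print_stream(fresh_stream)"
]
},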
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}