forked from phoenix/litellm-mirror
ollama exception mapping + new cookbook
parent 0c895b76fe
commit 935d148423
2 changed files with 169 additions and 76 deletions
242 cookbook/liteLLM_Ollama.ipynb vendored
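The commit title mentions Ollama exception mapping, but that change lives in the other file of this commit rather than in the cookbook diff shown below. As a rough, hedged illustration only: the sketch assumes litellm surfaces provider failures (for example, the local Ollama server being unreachable) as ordinary Python exceptions that can be caught around completion(); the specific exception classes mapped for Ollama at this version are not shown in this diff, so a broad except is used.

from litellm import completion

# Hypothetical illustration: if the Ollama server at api_base is not running,
# the failure is expected to surface as a Python exception from completion().
try:
    response = completion(
        model="ollama/llama2",
        messages=[{"content": "respond in 20 words. who are you?", "role": "user"}],
        api_base="http://localhost:11434",
    )
except Exception as e:  # exact mapped exception class is an assumption
    print(f"Ollama call failed: {type(e).__name__}: {e}")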
@@ -10,42 +10,34 @@
"output_type": "stream",
"text": [
"Defaulting to user installation because normal site-packages is not writeable\n",
"Requirement already satisfied: litellm==0.1.385 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (0.1.385)\n",
"Requirement already satisfied: openai<0.28.0,>=0.27.8 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.385) (0.27.8)\n",
"Requirement already satisfied: python-dotenv<2.0.0,>=1.0.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.385) (1.0.0)\n",
"Requirement already satisfied: tiktoken<0.5.0,>=0.4.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.385) (0.4.0)\n",
"Requirement already satisfied: requests>=2.20 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.385) (2.28.2)\n",
"Requirement already satisfied: tqdm in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.385) (4.65.0)\n",
"Requirement already satisfied: aiohttp in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.385) (3.8.4)\n",
"Requirement already satisfied: regex>=2022.1.18 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from tiktoken<0.5.0,>=0.4.0->litellm==0.1.385) (2023.6.3)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (3.1.0)\n",
"Requirement already satisfied: idna<4,>=2.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (3.4)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.26.6)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.385) (2023.5.7)\n",
"Requirement already satisfied: attrs>=17.3.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (23.1.0)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (6.0.4)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (4.0.2)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.9.2)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.3.3)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.385) (1.3.1)\n",
"\n",
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.1.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.2.1\u001b[0m\n",
"\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49m/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip\u001b[0m\n"
"Requirement already satisfied: litellm==0.1.723 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (0.1.723)\n",
"Requirement already satisfied: importlib-metadata<7.0.0,>=6.8.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.723) (6.8.0)\n",
"Requirement already satisfied: openai<0.29.0,>=0.27.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.723) (0.28.0)\n",
"Requirement already satisfied: python-dotenv>=0.2.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.723) (1.0.0)\n",
"Requirement already satisfied: tiktoken<0.5.0,>=0.4.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.723) (0.4.0)\n",
"Requirement already satisfied: zipp>=0.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from importlib-metadata<7.0.0,>=6.8.0->litellm==0.1.723) (3.15.0)\n",
"Requirement already satisfied: requests>=2.20 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.29.0,>=0.27.0->litellm==0.1.723) (2.28.2)\n",
"Requirement already satisfied: tqdm in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.29.0,>=0.27.0->litellm==0.1.723) (4.65.0)\n",
"Requirement already satisfied: aiohttp in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.29.0,>=0.27.0->litellm==0.1.723) (3.8.4)\n",
"Requirement already satisfied: regex>=2022.1.18 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from tiktoken<0.5.0,>=0.4.0->litellm==0.1.723) (2023.6.3)\n",
"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.723) (3.1.0)\n",
"Requirement already satisfied: idna<4,>=2.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.723) (3.4)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.723) (1.26.6)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.723) (2023.5.7)\n",
"Requirement already satisfied: attrs>=17.3.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.723) (23.1.0)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.723) (6.0.4)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.723) (4.0.2)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.723) (1.9.2)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.723) (1.3.3)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.723) (1.3.1)\n",
"Defaulting to user installation because normal site-packages is not writeable\n",
"Requirement already satisfied: async_generator in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (1.10)\n"
]
}
],
"source": [
"!pip install litellm==0.1.385"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"from litellm import completion\n",
"import asyncio"
"!pip install litellm==0.1.723\n",
"!pip install async_generator"
]
},
{
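The updated install cell above pins litellm 0.1.723 and adds async_generator for the async streaming cell later in the notebook. For readers following along, a quick reachability check like the sketch below can save confusion before running the Ollama cells; it reuses the api_base from this cookbook and the requests dependency visible in the pip output, and it is an editor's sketch rather than part of the commit.

import requests

# Assumes the default Ollama endpoint used throughout this notebook.
OLLAMA_API_BASE = "http://localhost:11434"

try:
    r = requests.get(OLLAMA_API_BASE, timeout=2)
    print("Ollama server reachable, HTTP status:", r.status_code)
except requests.exceptions.ConnectionError:
    print("No Ollama server at", OLLAMA_API_BASE,
          "-- start it (e.g. `ollama serve`) before running the cells below.")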
@@ -53,43 +45,63 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"### Setup Messages"
"## Call Ollama - llama2"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"user_message = \"respond in 20 words. who are you?\"\n",
"messages = [{ \"content\": user_message,\"role\": \"user\"}]"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"### Call Ollama - llama2 with chatGPT Input/Output using litellm.completion() "
]
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<async_generator object get_ollama_response_stream at 0x106b11790>\n"
"<generator object get_ollama_response_stream at 0x108e16740>\n",
"{'role': 'assistant', 'content': ' I'}\n",
"{'role': 'assistant', 'content': ' am'}\n",
"{'role': 'assistant', 'content': ' L'}\n",
"{'role': 'assistant', 'content': 'La'}\n",
"{'role': 'assistant', 'content': 'MA'}\n",
"{'role': 'assistant', 'content': ','}\n",
"{'role': 'assistant', 'content': ' an'}\n",
"{'role': 'assistant', 'content': ' A'}\n",
"{'role': 'assistant', 'content': 'I'}\n",
"{'role': 'assistant', 'content': ' assistant'}\n",
"{'role': 'assistant', 'content': ' developed'}\n",
"{'role': 'assistant', 'content': ' by'}\n",
"{'role': 'assistant', 'content': ' Meta'}\n",
"{'role': 'assistant', 'content': ' A'}\n",
"{'role': 'assistant', 'content': 'I'}\n",
"{'role': 'assistant', 'content': ' that'}\n",
"{'role': 'assistant', 'content': ' can'}\n",
"{'role': 'assistant', 'content': ' understand'}\n",
"{'role': 'assistant', 'content': ' and'}\n",
"{'role': 'assistant', 'content': ' respond'}\n",
"{'role': 'assistant', 'content': ' to'}\n",
"{'role': 'assistant', 'content': ' human'}\n",
"{'role': 'assistant', 'content': ' input'}\n",
"{'role': 'assistant', 'content': ' in'}\n",
"{'role': 'assistant', 'content': ' a'}\n",
"{'role': 'assistant', 'content': ' convers'}\n",
"{'role': 'assistant', 'content': 'ational'}\n",
"{'role': 'assistant', 'content': ' manner'}\n",
"{'role': 'assistant', 'content': '.'}\n"
]
}
],
"source": [
"response = completion(model=\"llama2\", messages=messages, api_base=\"http://localhost:11434\", custom_llm_provider=\"ollama\", stream=True)\n",
"print(response)"
"from litellm import completion\n",
"\n",
"response = completion(\n",
" model=\"ollama/llama2\", \n",
" messages=[{ \"content\": \"respond in 20 words. who are you?\",\"role\": \"user\"}], \n",
" api_base=\"http://localhost:11434\", \n",
" stream=True\n",
")\n",
"print(response)\n",
"for chunk in response:\n",
" print(chunk['choices'][0]['delta'])\n"
]
},
{
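The streaming output above prints each chunk's OpenAI-style delta dict ({'role': ..., 'content': ...}). A small sketch of collecting those deltas into the full reply is given below; it follows the chunk indexing used in the cell (chunk['choices'][0]['delta']) and defensively skips chunks without a 'content' key, which is an assumption rather than something this diff demonstrates.

from litellm import completion

response = completion(
    model="ollama/llama2",
    messages=[{"content": "respond in 20 words. who are you?", "role": "user"}],
    api_base="http://localhost:11434",
    stream=True,
)

# Accumulate the streamed deltas into one string instead of printing each one.
full_reply = ""
for chunk in response:
    delta = chunk["choices"][0]["delta"]
    full_reply += delta.get("content", "")  # assumption: some chunks may carry no content
print(full_reply)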
@@ -97,12 +109,12 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Iterate through the generator - Streaming"
"## Call Ollama - Llama2 with Acompletion + Streaming"
]
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 4,
"metadata": {},
"outputs": [
{
@@ -110,34 +122,112 @@
"output_type": "stream",
"text": [
"{'choices': [{'delta': {'role': 'assistant', 'content': ' I'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': \"'\"}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'm'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' an'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' am'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' a'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' text'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': '-'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'based'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' A'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'I'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' assistant'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' trained'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' to'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' help'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' with'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' tasks'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' language'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' model'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' and'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' answer'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' questions'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': '.'}}]}\n"
"{'choices': [{'delta': {'role': 'assistant', 'content': ' I'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' don'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': \"'\"}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 't'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' have'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' access'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' to'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' real'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': '-'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'time'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' weather'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' information'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': '.'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'ℝ'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' However'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ','}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' I'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' can'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' tell'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' you'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' the'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' current'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' weather'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' conditions'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' for'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' a'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' specific'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' location'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' if'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' you'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' provide'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' me'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' with'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' the'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' city'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' or'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' zip'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' code'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': '.'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' Alternatively'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ','}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' you'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' can'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' check'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' the'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' weather'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' for'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' any'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' location'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' in'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' the'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' world'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' by'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' using'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' a'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' search'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' engine'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' or'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' visit'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': 'ing'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' a'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' weather'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': ' website'}}]}\n",
"{'choices': [{'delta': {'role': 'assistant', 'content': '.'}}]}\n",
"None\n"
]
},
{
"ename": "TypeError",
"evalue": "'async for' requires an object with __aiter__ method, got NoneType",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[4], line 15\u001b[0m\n\u001b[1;32m 13\u001b[0m result \u001b[39m=\u001b[39m \u001b[39mawait\u001b[39;00m async_ollama()\n\u001b[1;32m 14\u001b[0m \u001b[39mprint\u001b[39m(result)\n\u001b[0;32m---> 15\u001b[0m \u001b[39masync\u001b[39;00m \u001b[39mfor\u001b[39;00m chunk \u001b[39min\u001b[39;00m result:\n\u001b[1;32m 16\u001b[0m \u001b[39mprint\u001b[39m(chunk)\n",
"\u001b[0;31mTypeError\u001b[0m: 'async for' requires an object with __aiter__ method, got NoneType"
]
}
],
"source": [
"import litellm\n",
"\n",
"async def get_response(generator):\n",
" response = \"\"\n",
" async for elem in generator:\n",
" print(elem)\n",
" response += elem['choices'][0]['delta'][\"content\"]\n",
" return response\n",
"async def async_ollama():\n",
" response = await litellm.acompletion(\n",
" model=\"ollama/llama2\", \n",
" messages=[{ \"content\": \"what's the weather\" ,\"role\": \"user\"}], \n",
" api_base=\"http://localhost:11434\", \n",
" stream=True\n",
" )\n",
" async for chunk in response:\n",
" print(chunk)\n",
"\n",
"string_response = await get_response(response)"
"result = await async_ollama()\n",
"print(result)\n",
"async for chunk in result:\n",
" print(chunk)"
]
},
{
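The TypeError in the cell above comes from iterating over the return value of async_ollama(), which only prints chunks and returns None, so the follow-up `async for chunk in result` has nothing to iterate. Below is a hedged sketch of a corrected pattern; it assumes, as the cell does, that litellm.acompletion(..., stream=True) yields OpenAI-style delta chunks consumable with `async for`, and it accumulates the text inside the coroutine so the caller gets a plain string back.

import asyncio
import litellm

async def async_ollama() -> str:
    """Stream a reply from the local Ollama server and return the joined text."""
    response = await litellm.acompletion(
        model="ollama/llama2",
        messages=[{"content": "what's the weather", "role": "user"}],
        api_base="http://localhost:11434",
        stream=True,
    )
    text = ""
    async for chunk in response:  # consume the async generator exactly once
        delta = chunk["choices"][0]["delta"]
        text += delta.get("content", "")  # assumption: skip chunks without content
    return text

if __name__ == "__main__":
    # In the notebook, `result = await async_ollama()` works because IPython
    # allows top-level await; as a plain script, use asyncio.run instead.
    print(asyncio.run(async_ollama()))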