diff --git a/cookbook/liteLLM_Ollama.ipynb b/cookbook/liteLLM_Ollama.ipynb
new file mode 100644
index 000000000..3de54d8ef
--- /dev/null
+++ b/cookbook/liteLLM_Ollama.ipynb
@@ -0,0 +1,135 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Defaulting to user installation because normal site-packages is not writeable\n",
+      "Collecting litellm==0.1.384\n",
+      "  Downloading litellm-0.1.384-py3-none-any.whl (43 kB)\n",
+      "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m43.1/43.1 kB\u001b[0m \u001b[31m2.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+      "\u001b[?25hRequirement already satisfied: openai<0.28.0,>=0.27.8 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.384) (0.27.8)\n",
+      "Requirement already satisfied: python-dotenv<2.0.0,>=1.0.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.384) (1.0.0)\n",
+      "Requirement already satisfied: tiktoken<0.5.0,>=0.4.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.384) (0.4.0)\n",
+      "Requirement already satisfied: requests>=2.20 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.384) (2.28.2)\n",
+      "Requirement already satisfied: tqdm in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.384) (4.65.0)\n",
+      "Requirement already satisfied: aiohttp in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.28.0,>=0.27.8->litellm==0.1.384) (3.8.4)\n",
+      "Requirement already satisfied: regex>=2022.1.18 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from tiktoken<0.5.0,>=0.4.0->litellm==0.1.384) (2023.6.3)\n",
+      "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.384) (3.1.0)\n",
+      "Requirement already satisfied: idna<4,>=2.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.384) (3.4)\n",
+      "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.384) (1.26.6)\n",
+      "Requirement already satisfied: certifi>=2017.4.17 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.28.0,>=0.27.8->litellm==0.1.384) (2023.5.7)\n",
+      "Requirement already satisfied: attrs>=17.3.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.384) (23.1.0)\n",
+      "Requirement already satisfied: multidict<7.0,>=4.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.384) (6.0.4)\n",
+      "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.384) (4.0.2)\n",
+      "Requirement already satisfied: yarl<2.0,>=1.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.384) (1.9.2)\n",
+      "Requirement already satisfied: frozenlist>=1.1.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.384) (1.3.3)\n",
+      "Requirement already satisfied: aiosignal>=1.1.2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.28.0,>=0.27.8->litellm==0.1.384) (1.3.1)\n",
+      "Installing collected packages: litellm\n",
+      "  Attempting uninstall: litellm\n",
+      "    Found existing installation: litellm 0.1.379\n",
+      "    Uninstalling litellm-0.1.379:\n",
+      "      Successfully uninstalled litellm-0.1.379\n",
+      "Successfully installed litellm-0.1.384\n",
+      "\n",
+      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.1.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.2.1\u001b[0m\n",
+      "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49m/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip\u001b[0m\n"
+     ]
+    }
+   ],
+   "source": [
+    "!pip install litellm==0.1.384"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from litellm import completion\n",
+    "import asyncio"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "\n",
+    "user_message = \"respond in 20 words. who are you?\"\n",
+    "messages = [{\"content\": user_message, \"role\": \"user\"}]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "\n",
+    "async def get_response(generator):\n",
+    "    response = \"\"\n",
+    "    async for elem in generator:\n",
+    "        print(elem)\n",
+    "        response += elem[\"choices\"][0][\"delta\"][\"content\"]\n",
+    "    return response"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "response = completion(model=\"llama2\", messages=messages, custom_api_base=\"http://localhost:11434\", custom_llm_provider=\"ollama\", stream=True)\n",
+    "print(response)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "string_response = await get_response(response)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.6"
+  },
+  "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
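For readers running this flow outside Jupyter, here is a minimal script-level sketch of what the notebook does (not part of the diff; it assumes a local Ollama server on port 11434 with the `llama2` model pulled, and uses the same `custom_api_base`/`custom_llm_provider` parameters the notebook itself passes):

```python
# Sketch only: a plain-script version of the notebook flow.
# Assumes Ollama is serving llama2 locally and this litellm version is installed.
import asyncio

from litellm import completion

messages = [{"content": "respond in 20 words. who are you?", "role": "user"}]

async def main():
    # For the ollama provider with stream=True, completion() hands back an
    # async generator of chunks, as the notebook demonstrates.
    generator = completion(
        model="llama2",
        messages=messages,
        custom_api_base="http://localhost:11434",
        custom_llm_provider="ollama",
        stream=True,
    )
    response = ""
    async for chunk in generator:
        # Each chunk uses the OpenAI-style shape introduced in utils.py below.
        response += chunk["choices"][0]["delta"]["content"]
    print(response)

# asyncio.run works in a plain script; inside Jupyter, use `await main()`
# instead, since the kernel already runs an event loop.
asyncio.run(main())
```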
diff --git a/litellm/utils.py b/litellm/utils.py
index 2f8372e51..37928c9c4 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -766,11 +766,9 @@ async def get_ollama_response_stream(api_base="http://localhost:11434", model="l
             if chunk.strip() != "":
                 j = json.loads(chunk)
                 if "response" in j:
-                    print(j["response"])
-                    yield {
-                        "role": "assistant",
-                        "content": j["response"]
-                    }
+                    completion_obj = {"role": "assistant", "content": ""}
+                    completion_obj["content"] = j["response"]
+                    yield {"choices": [{"delta": completion_obj}]}
                     # self.responses.append(j["response"])
                     # yield "blank"
         except Exception as e:
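The utils.py change above replaces the old flat `{"role": ..., "content": ...}` chunks with an OpenAI-style streaming envelope, so downstream code written against OpenAI's `choices[0].delta` format can consume Ollama streams unchanged. A small sketch of the new chunk shape and a generic consumer (the `drain` helper is illustrative, not part of the library):

```python
# Each chunk yielded by get_ollama_response_stream now looks like:
#   {"choices": [{"delta": {"role": "assistant", "content": "<token>"}}]}
# mirroring OpenAI's streaming format.

async def drain(generator):
    """Accumulate streamed delta contents into one string."""
    text = ""
    async for chunk in generator:
        delta = chunk["choices"][0]["delta"]
        # .get is defensive: a delta without content contributes nothing.
        text += delta.get("content", "")
    return text
```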
name = "litellm" -version = "0.1.384" +version = "0.1.385" description = "Library to easily interface with LLM API providers" authors = ["BerriAI"] license = "MIT License"