ishaan-jaff 2023-09-21 12:33:39 -07:00
parent 9b3e40e4e2
commit 8d8501fe19
2 changed files with 6 additions and 30 deletions


@@ -2,37 +2,9 @@
"cells": [
{
"cell_type": "code",
-"execution_count": 1,
+"execution_count": null,
"metadata": {},
-"outputs": [
-{
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"Defaulting to user installation because normal site-packages is not writeable\n",
-"Requirement already satisfied: litellm==0.1.724 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (0.1.724)\n",
-"Requirement already satisfied: importlib-metadata<7.0.0,>=6.8.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.724) (6.8.0)\n",
-"Requirement already satisfied: openai<0.29.0,>=0.27.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.724) (0.28.0)\n",
-"Requirement already satisfied: python-dotenv>=0.2.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.724) (1.0.0)\n",
-"Requirement already satisfied: tiktoken<0.5.0,>=0.4.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from litellm==0.1.724) (0.4.0)\n",
-"Requirement already satisfied: zipp>=0.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from importlib-metadata<7.0.0,>=6.8.0->litellm==0.1.724) (3.15.0)\n",
-"Requirement already satisfied: requests>=2.20 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.29.0,>=0.27.0->litellm==0.1.724) (2.28.2)\n",
-"Requirement already satisfied: tqdm in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.29.0,>=0.27.0->litellm==0.1.724) (4.65.0)\n",
-"Requirement already satisfied: aiohttp in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from openai<0.29.0,>=0.27.0->litellm==0.1.724) (3.8.4)\n",
-"Requirement already satisfied: regex>=2022.1.18 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from tiktoken<0.5.0,>=0.4.0->litellm==0.1.724) (2023.6.3)\n",
-"Requirement already satisfied: charset-normalizer<4,>=2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.724) (3.1.0)\n",
-"Requirement already satisfied: idna<4,>=2.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.724) (3.4)\n",
-"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.724) (1.26.6)\n",
-"Requirement already satisfied: certifi>=2017.4.17 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from requests>=2.20->openai<0.29.0,>=0.27.0->litellm==0.1.724) (2023.5.7)\n",
-"Requirement already satisfied: attrs>=17.3.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.724) (23.1.0)\n",
-"Requirement already satisfied: multidict<7.0,>=4.5 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.724) (6.0.4)\n",
-"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.724) (4.0.2)\n",
-"Requirement already satisfied: yarl<2.0,>=1.0 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.724) (1.9.2)\n",
-"Requirement already satisfied: frozenlist>=1.1.1 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.724) (1.3.3)\n",
-"Requirement already satisfied: aiosignal>=1.1.2 in /Users/ishaanjaffer/Library/Python/3.9/lib/python/site-packages (from aiohttp->openai<0.29.0,>=0.27.0->litellm==0.1.724) (1.3.1)\n"
-]
-}
-],
+"outputs": [],
"source": [
"!pip install litellm # version 0.1.724 or higher "
]
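The notebook hunk above strips the verbose pip log and resets the install cell. As a hypothetical follow-up cell (not part of this commit), you could verify that the installed version meets the `0.1.724` floor mentioned in the install comment:

```python
# Hypothetical check cell: confirm the installed LiteLLM version.
from importlib.metadata import version

print(version("litellm"))  # expect 0.1.724 or higher, per the install comment above
```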


@@ -1,6 +1,10 @@
# Ollama
LiteLLM supports all models from [Ollama](https://github.com/jmorganca/ollama)
+<a target="_blank" href="https://colab.research.google.com/github/BerriAI/litellm/blob/main/cookbook/liteLLM_Ollama.ipynb">
+<img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/>
+</a>
## Pre-requisites
Ensure you have your ollama server running
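To show where this setup leads, here is a minimal sketch of calling a local Ollama model through LiteLLM once the server is running. The `ollama/llama2` model name and the `http://localhost:11434` endpoint are assumptions about a typical Ollama install, not details taken from this commit.

```python
# Sketch: route a completion request to a locally running Ollama server via LiteLLM.
# Assumes `ollama serve` is up and a model has been pulled, e.g. `ollama pull llama2`.
from litellm import completion

response = completion(
    model="ollama/llama2",              # assumed model name; any pulled Ollama model works
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    api_base="http://localhost:11434",  # Ollama's default local API endpoint
)
print(response["choices"][0]["message"]["content"])
```

If the call fails, the usual first check is that the Ollama server is actually reachable at that endpoint.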