fix docs hierarchy

commit 408b9a4e22 (parent b49583893c)
2 changed files with 58 additions and 55 deletions
@@ -1,72 +1,56 @@
 {
-  "nbformat": 4,
-  "nbformat_minor": 0,
-  "metadata": {
-    "colab": {
-      "provenance": []
-    },
-    "kernelspec": {
-      "name": "python3",
-      "display_name": "Python 3"
-    },
-    "language_info": {
-      "name": "python"
-    }
-  },
   "cells": [
     {
+      "attachments": {},
       "cell_type": "markdown",
+      "metadata": {
+        "id": "WemkFEdDAnJL"
+      },
       "source": [
         "## liteLLM Together AI Tutorial\n",
         "https://together.ai/\n"
-      ],
-      "metadata": {
-        "id": "WemkFEdDAnJL"
-      }
+      ]
     },
     {
       "cell_type": "code",
-      "source": [
-        "!pip install litellm==0.1.371"
-      ],
+      "execution_count": null,
       "metadata": {
         "id": "pc6IO4V99O25"
       },
-      "execution_count": null,
-      "outputs": []
+      "outputs": [],
+      "source": [
+        "!pip install litellm==0.1.371"
+      ]
     },
     {
       "cell_type": "code",
+      "execution_count": 5,
+      "metadata": {
+        "id": "TMI3739_9q97"
+      },
+      "outputs": [],
       "source": [
         "import os\n",
         "from litellm import completion\n",
         "os.environ[\"TOGETHER_AI_TOKEN\"] = \"\" #@param\n",
         "user_message = \"Hello, whats the weather in San Francisco??\"\n",
         "messages = [{ \"content\": user_message,\"role\": \"user\"}]"
-      ],
-      "metadata": {
-        "id": "TMI3739_9q97"
-      },
-      "execution_count": 5,
-      "outputs": []
+      ]
     },
     {
+      "attachments": {},
       "cell_type": "markdown",
+      "metadata": {
+        "id": "bEqJ2HHjBJqq"
+      },
       "source": [
         "## Calling togethercomputer/llama-2-70b-chat\n",
         "https://api.together.xyz/playground/chat?model=togethercomputer%2Fllama-2-70b-chat"
-      ],
-      "metadata": {
-        "id": "bEqJ2HHjBJqq"
-      }
+      ]
     },
     {
       "cell_type": "code",
-      "source": [
-        "model_name = \"togethercomputer/llama-2-70b-chat\"\n",
-        "response = completion(model=model_name, messages=messages, together_ai=True)\n",
-        "print(response)"
-      ],
+      "execution_count": 6,
       "metadata": {
         "colab": {
           "base_uri": "https://localhost:8080/"
@@ -74,34 +58,34 @@
         "id": "Jrrt8puj523f",
         "outputId": "5a5b5beb-cda3-413e-8e83-4423d392cb44"
       },
-      "execution_count": 6,
       "outputs": [
         {
-          "output_type": "stream",
           "name": "stdout",
+          "output_type": "stream",
           "text": [
             "{'choices': [{'finish_reason': 'stop', 'index': 0, 'message': {'role': 'assistant', 'content': \"\\n\\nI'm not able to provide real-time weather information. However, I can suggest\"}}], 'created': 1691629657.9288375, 'model': 'togethercomputer/llama-2-70b-chat', 'usage': {'prompt_tokens': 9, 'completion_tokens': 17, 'total_tokens': 26}}\n"
           ]
         }
       ],
+      "source": [
+        "model_name = \"togethercomputer/llama-2-70b-chat\"\n",
+        "response = completion(model=model_name, messages=messages, custom_llm_provider=\"together_ai\")\n",
+        "print(response)"
+      ]
     },
     {
+      "attachments": {},
       "cell_type": "markdown",
-      "source": [
-        "## With Streaming"
-      ],
       "metadata": {
         "id": "sfWtgf-mBQcM"
-      }
+      },
+      "source": [
+        "## With Streaming"
+      ]
     },
     {
       "cell_type": "code",
-      "source": [
-        "response = completion(model=model_name, messages=messages, together_ai=True, stream=True)\n",
-        "print(response)\n",
-        "for chunk in response:\n",
-        " print(chunk['choices'][0]['delta']) # same as openai format"
-      ],
+      "execution_count": 8,
       "metadata": {
         "colab": {
           "base_uri": "https://localhost:8080/"
@@ -109,11 +93,10 @@
         "id": "wuBhlZtC6MH5",
         "outputId": "fcb82177-6494-4963-8e37-8716d3b9e616"
       },
-      "execution_count": 8,
       "outputs": [
         {
-          "output_type": "stream",
           "name": "stdout",
+          "output_type": "stream",
           "text": [
             "<litellm.utils.CustomStreamWrapper object at 0x7ad005e93ee0>\n",
             "{'role': 'assistant', 'content': '\\\\n'}\n",
@@ -136,7 +119,27 @@
             "{'role': 'assistant', 'content': ' can'}\n"
           ]
         }
       ],
+      "source": [
+        "response = completion(model=model_name, messages=messages, stream=True, custom_llm_provider=\"together_ai\")\n",
+        "print(response)\n",
+        "for chunk in response:\n",
+        " print(chunk['choices'][0]['delta']) # same as openai format"
+      ]
     }
-  ]
-}
+  ],
+  "metadata": {
+    "colab": {
+      "provenance": []
+    },
+    "kernelspec": {
+      "display_name": "Python 3",
+      "name": "python3"
+    },
+    "language_info": {
+      "name": "python"
+    }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 0
+}
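
Aside from moving the top-level "metadata"/"nbformat" keys below "cells" and normalizing each cell's key order, the substantive change is the provider flag: the removed together_ai=True keyword becomes custom_llm_provider="together_ai". A minimal sketch of the updated setup and non-streaming call, assuming litellm==0.1.371 as pinned in the notebook:

import os
from litellm import completion

# Together AI API key; the notebook leaves this blank for the reader to fill in.
os.environ["TOGETHER_AI_TOKEN"] = ""

user_message = "Hello, whats the weather in San Francisco??"
messages = [{"content": user_message, "role": "user"}]

model_name = "togethercomputer/llama-2-70b-chat"
# custom_llm_provider="together_ai" routes the request to Together AI;
# it replaces the together_ai=True keyword removed by this commit.
response = completion(model=model_name, messages=messages,
                      custom_llm_provider="together_ai")
print(response)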
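The recorded cell output shows the response is an OpenAI-style dict, so its fields can be read the usual way. A short sketch against the sample output captured in the notebook:

# Field names taken from the output recorded in the committed notebook.
text = response['choices'][0]['message']['content']
usage = response['usage']  # e.g. {'prompt_tokens': 9, 'completion_tokens': 17, 'total_tokens': 26}
print(text)
print(usage)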
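The streaming cell gets the same flag change. With stream=True, completion() returns an iterator (a litellm.utils.CustomStreamWrapper in the recorded output) that yields OpenAI-style delta chunks:

# Streaming variant from the notebook; each chunk follows the OpenAI delta format,
# e.g. {'role': 'assistant', 'content': ' can'}.
response = completion(model=model_name, messages=messages, stream=True,
                      custom_llm_provider="together_ai")
for chunk in response:
    print(chunk['choices'][0]['delta'])  # same as openai format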