forked from phoenix/litellm-mirror
130 lines
626 KiB
Text
Vendored
130 lines
626 KiB
Text
Vendored
{
|
|
"cells": [
|
|
{
|
|
"attachments": {},
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "9AxeMfikUw2A"
|
|
},
|
|
"source": [
|
|
"# Using LiteLLM with PromptLayer\n",
|
|
"PromptLayer allows you to track your requests, responses, and prompts\n",
|
|
"\n",
|
|
"LiteLLM allows you to use any litellm supported model and send data to promptlayer\n",
|
|
"\n",
|
|
"Getting started docs: https://docs.litellm.ai/docs/observability/promptlayer_integration"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {
|
|
"id": "VwgSvAcVCiJX"
|
|
},
|
|
"outputs": [],
|
|
"source": [
|
|
"%pip install litellm"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 18,
|
|
"metadata": {
|
|
"id": "r8QSgKbXFhpe"
|
|
},
|
|
"outputs": [],
|
|
"source": [
|
|
"import litellm\n",
|
|
"from litellm import completion\n",
|
|
"import os\n",
|
|
"os.environ['OPENAI_API_KEY'] = \"\"\n",
|
|
"os.environ['REPLICATE_API_TOKEN'] = \"\"\n",
|
|
"os.environ['PROMPTLAYER_API_KEY'] = \"\"\n",
|
|
"\n",
|
|
"# Set Promptlayer as a success callback\n",
|
|
"litellm.success_callback = ['promptlayer']\n",
|
|
"\n"
|
|
]
|
|
},
|
|
{
|
|
"attachments": {},
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "gaklMZhxVFBv"
|
|
},
|
|
"source": [
|
|
"## Call OpenAI with LiteLLM x PromptLayer"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"id": "NOZL7MWiTFct",
|
|
"outputId": "039af693-c1d6-40ee-a081-0a494cf27c6a"
|
|
},
|
|
"outputs": [],
|
|
"source": [
|
|
"\n",
|
|
"result = completion(model=\"gpt-3.5-turbo\", messages=[{\"role\": \"user\", \"content\": \"gm this is ishaan\"}])\n",
|
|
"print(result)"
|
|
]
|
|
},
|
|
{
|
|
"attachments": {},
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "Qt91CjpeVJ32"
|
|
},
|
|
"source": [
|
|
"## Call Replicate-CodeLlama with LiteLLM x PromptLayer"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"id": "dTwhEKelDy_J",
|
|
"outputId": "751f7883-390f-47bd-9aa4-3b1523bd1af2"
|
|
},
|
|
"outputs": [],
|
|
"source": [
|
|
"model=\"replicate/codellama-13b:1c914d844307b0588599b8393480a3ba917b660c7e9dfae681542b5325f228db\"\n",
|
|
"\n",
|
|
"result = completion(model=model, messages=[{\"role\": \"user\", \"content\": \"gm this is ishaan\"}])\n",
|
|
"print(result)"
|
|
]
|
|
},
|
|
{
|
|
"attachments": {},
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "qk-k6t8eVukF"
|
|
},
|
|
"source": [
|
|
"## View Logs on PromptLayer\n",
|
|
""
|
|
]
|
|
}
|
|
],
|
|
"metadata": {
|
|
"colab": {
|
|
"provenance": []
|
|
},
|
|
"kernelspec": {
|
|
"display_name": "Python 3",
|
|
"name": "python3"
|
|
},
|
|
"language_info": {
|
|
"name": "python"
|
|
}
|
|
},
|
|
"nbformat": 4,
|
|
"nbformat_minor": 0
|
|
}
|