Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-24 18:24:20 +00:00)
langfuse cookbook

This commit is contained in:
parent 851cb86daa
commit 8890e471d9

1 changed file with 197 additions and 0 deletions
cookbook/logging_observability/LiteLLM_Langfuse.ipynb (vendored, normal file, 197 lines)
@@ -0,0 +1,197 @@
{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "cells": [
    {
      "cell_type": "markdown",
      "source": [
        "## Use LiteLLM with Langfuse\n",
        "https://docs.litellm.ai/docs/observability/langfuse_integration"
      ],
      "metadata": {
        "id": "4FbDOmcj2VkM"
      }
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Install Dependencies"
      ],
      "metadata": {
        "id": "21W8Woog26Ns"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "!pip install litellm langfuse"
      ],
      "metadata": {
        "id": "xrjKLBxhxu2L"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Set Env Variables"
      ],
      "metadata": {
        "id": "jHEu-TjZ29PJ"
      }
    },
    {
      "cell_type": "code",
      "execution_count": 10,
      "metadata": {
        "id": "QWd9rTysxsWO"
      },
      "outputs": [],
      "source": [
        "import litellm\n",
        "from litellm import completion\n",
        "import os\n",
        "\n",
        "# from https://cloud.langfuse.com/\n",
        "os.environ[\"LANGFUSE_PUBLIC_KEY\"] = \"\"\n",
        "os.environ[\"LANGFUSE_SECRET_KEY\"] = \"\"\n",
        "\n",
        "\n",
        "# OpenAI and Cohere keys\n",
        "# You can use any of the litellm supported providers: https://docs.litellm.ai/docs/providers\n",
        "os.environ['OPENAI_API_KEY']=\"\"\n",
        "os.environ['COHERE_API_KEY']=\"\"\n"
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "## Set LangFuse as a callback for sending data\n",
        "## OpenAI completion call"
      ],
      "metadata": {
        "id": "NodQl0hp3Lma"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# set langfuse as a callback, litellm will send the data to langfuse\n",
        "litellm.success_callback = [\"langfuse\"]\n",
        "\n",
        "# openai call\n",
        "response = completion(\n",
        "  model=\"gpt-3.5-turbo\",\n",
        "  messages=[\n",
        "    {\"role\": \"user\", \"content\": \"Hi 👋 - i'm openai\"}\n",
        "  ]\n",
        ")\n",
        "\n",
        "print(response)"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "vNAuwJY1yp_F",
        "outputId": "c3a71e26-13f5-4379-fac9-409290ba79bb"
      },
      "execution_count": 8,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "{\n",
            "  \"id\": \"chatcmpl-85nP4xHdAP3jAcGneIguWATS9qdoO\",\n",
            "  \"object\": \"chat.completion\",\n",
            "  \"created\": 1696392238,\n",
            "  \"model\": \"gpt-3.5-turbo-0613\",\n",
            "  \"choices\": [\n",
            "    {\n",
            "      \"index\": 0,\n",
            "      \"message\": {\n",
            "        \"role\": \"assistant\",\n",
            "        \"content\": \"Hello! How can I assist you today?\"\n",
            "      },\n",
            "      \"finish_reason\": \"stop\"\n",
            "    }\n",
            "  ],\n",
            "  \"usage\": {\n",
            "    \"prompt_tokens\": 15,\n",
            "    \"completion_tokens\": 9,\n",
            "    \"total_tokens\": 24\n",
            "  }\n",
            "}\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "# we set langfuse as a callback in the prev cell\n",
        "# cohere call\n",
        "response = completion(\n",
        "  model=\"command-nightly\",\n",
        "  messages=[\n",
        "    {\"role\": \"user\", \"content\": \"Hi 👋 - i'm cohere\"}\n",
        "  ]\n",
        ")\n",
        "\n",
        "print(response)"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "2PMSLc_FziJL",
        "outputId": "1c37605e-b406-4ffc-aafd-e1983489c6be"
      },
      "execution_count": 9,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "{\n",
            "  \"object\": \"chat.completion\",\n",
            "  \"choices\": [\n",
            "    {\n",
            "      \"finish_reason\": \"stop\",\n",
            "      \"index\": 0,\n",
            "      \"message\": {\n",
            "        \"content\": \" Nice to meet you, Cohere! I'm excited to be meeting new members of the AI community\",\n",
            "        \"role\": \"assistant\",\n",
            "        \"logprobs\": null\n",
            "      }\n",
            "    }\n",
            "  ],\n",
            "  \"id\": \"chatcmpl-a14e903f-4608-4ceb-b996-8ebdf21360ca\",\n",
            "  \"created\": 1696392247.3313863,\n",
            "  \"model\": \"command-nightly\",\n",
            "  \"usage\": {\n",
            "    \"prompt_tokens\": 8,\n",
            "    \"completion_tokens\": 20,\n",
            "    \"total_tokens\": 28\n",
            "  }\n",
            "}\n"
          ]
        }
      ]
    }
  ]
}
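
For readers skimming the diff: the added notebook condenses to the short standalone script below. This is a minimal sketch of the same flow, not part of the commit itself; the empty strings are placeholders for your own Langfuse / OpenAI / Cohere keys, and the loop over the two model names is just a compact way of repeating the two calls shown in the notebook cells.

# Minimal sketch of the notebook's workflow (fill in the placeholder keys before running).
import os

import litellm
from litellm import completion

# Langfuse credentials, from https://cloud.langfuse.com/
os.environ["LANGFUSE_PUBLIC_KEY"] = ""
os.environ["LANGFUSE_SECRET_KEY"] = ""

# Provider keys; any litellm-supported provider works:
# https://docs.litellm.ai/docs/providers
os.environ["OPENAI_API_KEY"] = ""
os.environ["COHERE_API_KEY"] = ""

# Register Langfuse as a success callback: litellm then logs every
# successful completion call to Langfuse automatically.
litellm.success_callback = ["langfuse"]

# Same two calls as in the notebook (OpenAI, then Cohere via LiteLLM).
for model in ("gpt-3.5-turbo", "command-nightly"):
    response = completion(
        model=model,
        messages=[{"role": "user", "content": f"Hi 👋 - i'm calling {model}"}],
    )
    print(response)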