Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 02:34:29 +00:00)
support templates
This commit is contained in:
parent
4e74582b6f
commit
40eebc6bea
4 changed files with 368 additions and 9 deletions
348  cookbook/logging_observability/LiteLLM_Lunary.ipynb  (vendored, new file)
@@ -0,0 +1,348 @@
{
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"id": "4FbDOmcj2VkM"
|
||||
},
|
||||
"source": [
|
||||
"## Use LiteLLM with Langfuse\n",
|
||||
"https://docs.litellm.ai/docs/observability/langfuse_integration"
|
||||
]
|
||||
},
|
||||
{
"cell_type": "markdown",
"metadata": {
"id": "21W8Woog26Ns"
},
"source": [
"## Install Dependencies"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "xrjKLBxhxu2L"
},
"outputs": [],
"source": [
"%pip install litellm lunary"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "jHEu-TjZ29PJ"
},
"source": [
"## Set Env Variables"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"id": "QWd9rTysxsWO"
},
"outputs": [],
"source": [
"import litellm\n",
"from litellm import completion\n",
"import os\n",
"\n",
"# Lunary public key, from https://app.lunary.ai/\n",
"os.environ[\"LUNARY_PUBLIC_KEY\"] = \"a9ea1ae5-0727-4aac-8cda-10d1d4a7dd88\"\n",
"\n",
"\n",
"# LLM provider keys\n",
"# You can use any of the litellm supported providers: https://docs.litellm.ai/docs/providers\n",
"os.environ['OPENAI_API_KEY'] = \"<your-openai-api-key>\"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "NodQl0hp3Lma"
},
"source": [
"## Set Lunary as a callback for sending data\n",
"## OpenAI completion call"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "vNAuwJY1yp_F",
"outputId": "c3a71e26-13f5-4379-fac9-409290ba79bb"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How can I assist you today?', role='assistant'))]ModelResponse(id='chatcmpl-8xIWykI0GiJSmYtXYuB8Z363kpIBm', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How can I assist you today?', role='assistant'))], created=1709143276, model='gpt-3.5-turbo-0125', object='chat.completion', system_fingerprint='fp_86156a94a0', usage=Usage(completion_tokens=9, prompt_tokens=15, total_tokens=24))\n",
"\n",
"[Lunary] Add event: {\n",
" \"event\": \"start\",\n",
" \"type\": \"llm\",\n",
" \"name\": \"gpt-3.5-turbo\",\n",
" \"runId\": \"a363776a-bd07-4474-bce2-193067f01b2e\",\n",
" \"timestamp\": \"2024-02-28T18:01:15.188153+00:00\",\n",
" \"input\": {\n",
" \"role\": \"user\",\n",
" \"content\": \"Hi \\ud83d\\udc4b - i'm openai\"\n",
" },\n",
" \"extra\": {},\n",
" \"runtime\": \"litellm\",\n",
" \"metadata\": {}\n",
"}\n",
"\n",
"\n",
"[Lunary] Add event: {\n",
" \"event\": \"end\",\n",
" \"type\": \"llm\",\n",
" \"runId\": \"a363776a-bd07-4474-bce2-193067f01b2e\",\n",
" \"timestamp\": \"2024-02-28T18:01:16.846581+00:00\",\n",
" \"output\": {\n",
" \"role\": \"assistant\",\n",
" \"content\": \"Hello! How can I assist you today?\"\n",
" },\n",
" \"runtime\": \"litellm\",\n",
" \"tokensUsage\": {\n",
" \"completion\": 9,\n",
" \"prompt\": 15\n",
" }\n",
"}\n",
"\n",
"\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"--- Logging error ---\n",
"Traceback (most recent call last):\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/connectionpool.py\", line 537, in _make_request\n",
" response = conn.getresponse()\n",
" ^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/connection.py\", line 466, in getresponse\n",
" httplib_response = super().getresponse()\n",
" ^^^^^^^^^^^^^^^^^^^^^\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/http/client.py\", line 1423, in getresponse\n",
" response.begin()\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/http/client.py\", line 331, in begin\n",
" version, status, reason = self._read_status()\n",
" ^^^^^^^^^^^^^^^^^^^\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/http/client.py\", line 292, in _read_status\n",
" line = str(self.fp.readline(_MAXLINE + 1), \"iso-8859-1\")\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/socket.py\", line 707, in readinto\n",
" return self._sock.recv_into(b)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^\n",
"TimeoutError: timed out\n",
"\n",
"The above exception was the direct cause of the following exception:\n",
"\n",
"Traceback (most recent call last):\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/requests/adapters.py\", line 486, in send\n",
" resp = conn.urlopen(\n",
" ^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/connectionpool.py\", line 847, in urlopen\n",
" retries = retries.increment(\n",
" ^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/util/retry.py\", line 470, in increment\n",
" raise reraise(type(error), error, _stacktrace)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/util/util.py\", line 39, in reraise\n",
" raise value\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/connectionpool.py\", line 793, in urlopen\n",
" response = self._make_request(\n",
" ^^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/connectionpool.py\", line 539, in _make_request\n",
" self._raise_timeout(err=e, url=url, timeout_value=read_timeout)\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/urllib3/connectionpool.py\", line 370, in _raise_timeout\n",
" raise ReadTimeoutError(\n",
"urllib3.exceptions.ReadTimeoutError: HTTPConnectionPool(host='localhost', port=3333): Read timed out. (read timeout=5)\n",
"\n",
"During handling of the above exception, another exception occurred:\n",
"\n",
"Traceback (most recent call last):\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/lunary/consumer.py\", line 59, in send_batch\n",
" response = requests.post(\n",
" ^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/requests/api.py\", line 115, in post\n",
" return request(\"post\", url, data=data, json=json, **kwargs)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/requests/api.py\", line 59, in request\n",
" return session.request(method=method, url=url, **kwargs)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/requests/sessions.py\", line 589, in request\n",
" resp = self.send(prep, **send_kwargs)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/requests/sessions.py\", line 703, in send\n",
" r = adapter.send(request, **kwargs)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/requests/adapters.py\", line 532, in send\n",
" raise ReadTimeout(e, request=request)\n",
"requests.exceptions.ReadTimeout: HTTPConnectionPool(host='localhost', port=3333): Read timed out. (read timeout=5)\n",
"\n",
"During handling of the above exception, another exception occurred:\n",
"\n",
"Traceback (most recent call last):\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/logging/__init__.py\", line 1160, in emit\n",
" msg = self.format(record)\n",
" ^^^^^^^^^^^^^^^^^^^\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/logging/__init__.py\", line 999, in format\n",
" return fmt.format(record)\n",
" ^^^^^^^^^^^^^^^^^^\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/logging/__init__.py\", line 703, in format\n",
" record.message = record.getMessage()\n",
" ^^^^^^^^^^^^^^^^^^^\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/logging/__init__.py\", line 392, in getMessage\n",
" msg = msg % self.args\n",
" ~~~~^~~~~~~~~~~\n",
"TypeError: not all arguments converted during string formatting\n",
"Call stack:\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/threading.py\", line 1030, in _bootstrap\n",
" self._bootstrap_inner()\n",
" File \"/opt/homebrew/Cellar/python@3.12/3.12.2_1/Frameworks/Python.framework/Versions/3.12/lib/python3.12/threading.py\", line 1073, in _bootstrap_inner\n",
" self.run()\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/lunary/consumer.py\", line 24, in run\n",
" self.send_batch()\n",
" File \"/Users/vince/Library/Caches/pypoetry/virtualenvs/litellm-7WKnDWGw-py3.12/lib/python3.12/site-packages/lunary/consumer.py\", line 73, in send_batch\n",
" logging.error(\"[Lunary] Error sending events\", e)\n",
"Message: '[Lunary] Error sending events'\n",
"Arguments: (ReadTimeout(ReadTimeoutError(\"HTTPConnectionPool(host='localhost', port=3333): Read timed out. (read timeout=5)\")),)\n"
]
}
],
"source": [
|
||||
"# set langfuse as a callback, litellm will send the data to langfuse\n",
|
||||
"litellm.success_callback = [\"lunary\"]\n",
|
||||
"\n",
|
||||
"# openai call\n",
|
||||
"response = completion(\n",
|
||||
" model=\"gpt-3.5-turbo\",\n",
|
||||
" messages=[\n",
|
||||
" {\"role\": \"user\", \"content\": \"Hi 👋 - i'm openai\"}\n",
|
||||
" ]\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"print(response)"
|
||||
]
|
||||
},
|
||||
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Using LiteLLM with Lunary Templates\n",
"\n",
"You can use LiteLLM seamlessly with Lunary templates to manage your prompts and completions.\n",
"\n",
"Assuming you have created a template \"test-template\" with a variable \"question\", you can use it like this:"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "2PMSLc_FziJL",
"outputId": "1c37605e-b406-4ffc-aafd-e1983489c6be"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How can I assist you today?', role='assistant'))]ModelResponse(id='chatcmpl-8xIXegwpudg4YKnLB6pmpFGXqTHcH', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Hello! How can I assist you today?', role='assistant'))], created=1709143318, model='gpt-4-0125-preview', object='chat.completion', system_fingerprint='fp_c8aa5a06d6', usage=Usage(completion_tokens=9, prompt_tokens=21, total_tokens=30))\n",
"\n",
"[Lunary] Add event: {\n",
" \"event\": \"start\",\n",
" \"type\": \"llm\",\n",
" \"name\": \"gpt-4-turbo-preview\",\n",
" \"runId\": \"3a5b698d-cb55-4b3b-ab6d-04d2b99e40cb\",\n",
" \"timestamp\": \"2024-02-28T18:01:56.746249+00:00\",\n",
" \"input\": [\n",
" {\n",
" \"role\": \"system\",\n",
" \"content\": \"You are an helpful assistant.\"\n",
" },\n",
" {\n",
" \"role\": \"user\",\n",
" \"content\": \"Hi! Hello!\"\n",
" }\n",
" ],\n",
" \"extra\": {\n",
" \"temperature\": 1,\n",
" \"max_tokens\": 100\n",
" },\n",
" \"runtime\": \"litellm\",\n",
" \"metadata\": {}\n",
"}\n",
"\n",
"\n",
"[Lunary] Add event: {\n",
" \"event\": \"end\",\n",
" \"type\": \"llm\",\n",
" \"runId\": \"3a5b698d-cb55-4b3b-ab6d-04d2b99e40cb\",\n",
" \"timestamp\": \"2024-02-28T18:01:58.741244+00:00\",\n",
" \"output\": {\n",
" \"role\": \"assistant\",\n",
" \"content\": \"Hello! How can I assist you today?\"\n",
" },\n",
" \"runtime\": \"litellm\",\n",
" \"tokensUsage\": {\n",
" \"completion\": 9,\n",
" \"prompt\": 21\n",
" }\n",
"}\n",
"\n",
"\n"
]
}
],
"source": [
|
||||
"import lunary\n",
|
||||
"from litellm import completion\n",
|
||||
"\n",
|
||||
"template = lunary.render_template(\"test-template\", {\"question\": \"Hello!\"})\n",
|
||||
"\n",
|
||||
"response = completion(**template)\n",
|
||||
"\n",
|
||||
"print(response)"
|
||||
]
|
||||
}
|
||||
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
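
A note on the template cell above: `completion(**template)` works because whatever `lunary.render_template` returns resolves to ordinary `litellm.completion` keyword arguments. The exact return shape is not shown in this diff, so the sketch below reconstructs the assumed kwargs from the logged `[Lunary] Add event: start` entry (model, messages, temperature, max_tokens); treat the dictionary keys as an illustration, not as the confirmed Lunary API.

```python
import os

from litellm import completion

os.environ["OPENAI_API_KEY"] = "<your-openai-api-key>"  # placeholder, not a real key

# Assumed shape of the rendered "test-template", reconstructed from the
# "[Lunary] Add event: start" log in the notebook output above.
assumed_template_kwargs = {
    "model": "gpt-4-turbo-preview",
    "messages": [
        {"role": "system", "content": "You are an helpful assistant."},
        {"role": "user", "content": "Hi! Hello!"},
    ],
    "temperature": 1,
    "max_tokens": 100,
}

# Under that assumption, completion(**template) in the notebook is equivalent to:
response = completion(**assumed_template_kwargs)
print(response.choices[0].message.content)
```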
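One more note on the stderr block captured in the first code cell's output: the `ReadTimeout` is raised by the Lunary consumer thread while it posts its event batch to `localhost:3333` (a self-hosted endpoint in that particular run), so the completion itself still succeeds and only the event upload fails. A quick, generic reachability check before running the notebook is sketched below; the host and port simply mirror the traceback and are an assumption about your own setup.

```python
import socket


def endpoint_reachable(host: str = "localhost", port: int = 3333, timeout: float = 2.0) -> bool:
    """Return True if a TCP connection to host:port succeeds within `timeout` seconds."""
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False


# If this prints False, Lunary event uploads will time out exactly as in the log above.
print("Lunary endpoint reachable:", endpoint_reachable())
```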