feat: traceloop docs

Nir Gazit 2023-08-28 22:09:02 +02:00
parent 8a76c80039
commit c470ebf615
8 changed files with 50 additions and 2 deletions


@@ -8,6 +8,7 @@ liteLLM supports:
- [LLMonitor](https://llmonitor.com/docs)
- [Helicone](https://docs.helicone.ai/introduction)
- [Traceloop](https://traceloop.com/docs)
- [Sentry](https://docs.sentry.io/platforms/python/)
- [PostHog](https://posthog.com/docs/libraries/python)
- [Slack](https://slack.dev/bolt-python/concepts)
@@ -25,6 +26,7 @@ litellm.failure_callback=["sentry", "llmonitor"]
os.environ['SENTRY_API_URL'], os.environ['SENTRY_API_TRACE_RATE'] = "", ""
os.environ['POSTHOG_API_KEY'], os.environ['POSTHOG_API_URL'] = "api-key", "api-url"
os.environ["HELICONE_API_KEY"] = ""
os.environ["TRACELOOP_API_KEY"] = ""
os.environ["LLMONITOR_APP_ID"] = ""
response = completion(model="gpt-3.5-turbo", messages=messages)
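For reference, a minimal, runnable sketch of the full setup above might look like this (the key values and the prompt are placeholders you'd replace with your own):

```python
import os
import litellm
from litellm import completion

# report successes to Traceloop and LLMonitor, failures to Sentry
litellm.success_callback = ["traceloop", "llmonitor"]
litellm.failure_callback = ["sentry"]

os.environ["TRACELOOP_API_KEY"] = "your-traceloop-key"    # placeholder
os.environ["LLMONITOR_APP_ID"] = "your-llmonitor-app-id"  # placeholder
os.environ["SENTRY_API_URL"] = "your-sentry-dsn"          # placeholder

messages = [{"role": "user", "content": "Hey, how's it going?"}]
response = completion(model="gpt-3.5-turbo", messages=messages)
print(response["choices"][0]["message"]["content"])
```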


@@ -7,4 +7,5 @@
| Sentry | `SENTRY_API_URL` | `litellm.success_callback=["sentry"]` |
| Posthog | `POSTHOG_API_KEY`,`POSTHOG_API_URL` | `litellm.success_callback=["posthog"]` |
| Slack | `SLACK_API_TOKEN`,`SLACK_API_SECRET`,`SLACK_API_CHANNEL` | `litellm.success_callback=["slack"]` |
| Traceloop | `TRACELOOP_API_KEY` | `litellm.success_callback=["traceloop"]` |
| Helicone | `HELICONE_API_KEY` | `litellm.success_callback=["helicone"]` |


@@ -0,0 +1,34 @@
# Traceloop Tutorial
[Traceloop](https://traceloop.com) is a platform for monitoring and debugging the quality of your LLM outputs.
It gives you a way to track the performance of your LLM application, roll out changes with confidence, and debug issues in production.
It is based on [OpenTelemetry](https://opentelemetry.io), so it can provide full visibility into your LLM requests, as well as vector DB usage and other infrastructure in your stack.
<Image img={require('../../img/traceloop_dash.png')} />
## Getting Started
First, sign up to get an API key on the [Traceloop dashboard](https://app.traceloop.com).
While Traceloop is still in beta, [ping them](mailto:nir@traceloop.com) and mention you're using LiteLLM to get your early-access code.
Then, install the Traceloop SDK:
```bash
pip install traceloop
```
Then, with just one line of code, you can instantly log your LLM responses:
```python
litellm.success_callback = ["traceloop"]
```
When running your app, make sure to set the `TRACELOOP_API_KEY` environment variable to your API key.
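Putting it all together, a minimal end-to-end script might look like the following (the key value and the prompt are placeholders):

```python
import os
import litellm
from litellm import completion

# placeholder; use the API key from your Traceloop dashboard
os.environ["TRACELOOP_API_KEY"] = "your-traceloop-key"

# one line: every successful completion gets logged to Traceloop
litellm.success_callback = ["traceloop"]

response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "What is an LLM?"}],
)
print(response["choices"][0]["message"]["content"])
```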
To get better visualizations of how your code behaves, you may want to annotate specific parts of your LLM chain. See the [Traceloop docs on decorators](https://traceloop.com/docs/python-sdk/decorators) for more information.
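As a rough sketch of what such an annotation can look like: the `@workflow` decorator and its import path below follow the Traceloop docs, but the exact names may differ by SDK version, so treat this as an assumption rather than a verified API:

```python
from traceloop.sdk.decorators import workflow  # assumed import path

from litellm import completion

@workflow(name="joke_generator")
def generate_joke():
    # LLM calls made inside this function are grouped under the
    # "joke_generator" workflow in the Traceloop dashboard
    return completion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Tell me a joke about OpenTelemetry"}],
    )

generate_joke()
```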
Here's an example PR of such an integration: https://github.com/Codium-ai/pr-agent/pull/244
## Support
For any questions or issues with the integration, you can reach out to the Traceloop team on [Slack](https://join.slack.com/t/traceloopcommunity/shared_invite/zt-1plpfpm6r-zOHKI028VkpcWdobX65C~g) or via [email](mailto:dev@traceloop.com).

(Binary file added: traceloop_dash.png dashboard screenshot, 65 KiB)


@@ -0,0 +1,7 @@
import sys

from traceloop.tracing import Tracer

class TraceloopLogger:
    def __init__(self):
        # initialize the Traceloop tracer once, using the running
        # script's name (sys.argv[0]) as the app name
        self.tracer = Tracer.init(app_name=sys.argv[0])
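Since Traceloop builds on OpenTelemetry, a success hook on this logger could emit one span per completion. Here's a hypothetical sketch: the `log_event` name and signature mirror LiteLLM's other logger integrations, and the use of the standard `opentelemetry` API here is an assumption, not code from this commit:

```python
from opentelemetry import trace

# hypothetical method on TraceloopLogger; not part of this commit
def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose):
    tracer = trace.get_tracer("litellm.traceloop")
    with tracer.start_as_current_span("litellm.completion") as span:
        # start_time/end_time are assumed to be datetime objects
        span.set_attribute("llm.model", kwargs.get("model", "unknown"))
        span.set_attribute("llm.latency_s", (end_time - start_time).total_seconds())
    print_verbose(f"Traceloop: recorded span for {kwargs.get('model')}")
```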


@@ -9,6 +9,7 @@ import uuid
encoding = tiktoken.get_encoding("cl100k_base")
import importlib.metadata
from .integrations.traceloop import TraceloopLogger
from .integrations.helicone import HeliconeLogger
from .integrations.aispend import AISpendLogger
from .integrations.berrispend import BerriSpendLogger
@@ -729,7 +730,7 @@ def load_test_model(
def set_callbacks(callback_list):
    global sentry_sdk_instance, capture_exception, add_breadcrumb, posthog, slack_app, alerts_channel, traceloopLogger, heliconeLogger, aispendLogger, berrispendLogger, supabaseClient, liteDebuggerClient, llmonitorLogger, promptLayerLogger
    try:
        for callback in callback_list:
            print_verbose(f"callback: {callback}")
@@ -782,6 +783,8 @@ def set_callbacks(callback_list):
                )
                alerts_channel = os.environ["SLACK_API_CHANNEL"]
                print_verbose(f"Initialized Slack App: {slack_app}")
            elif callback == "traceloop":
                traceloopLogger = TraceloopLogger()
            elif callback == "helicone":
                heliconeLogger = HeliconeLogger()
            elif callback == "llmonitor":


@@ -17,6 +17,7 @@ nav:
- Quick Start: advanced.md
- Output Integrations: client_integrations.md
- LLMonitor Tutorial: llmonitor_integration.md
- Traceloop Tutorial: traceloop_integration.md
- Helicone Tutorial: helicone_integration.md
- Supabase Tutorial: supabase_integration.md
- BerriSpend Tutorial: berrispend_integration.md


@@ -33,7 +33,7 @@
- Call all models using the OpenAI format - `completion(model, messages)`
- Text responses will always be available at `['choices'][0]['message']['content']`
- **Error Handling** Using Model Fallbacks (if `GPT-4` fails, try `llama2`)
- **Logging** - Log Requests, Responses and Errors to `Supabase`, `Posthog`, `Mixpanel`, `Sentry`, `LLMonitor`, `Traceloop`, `Helicone` (any of the supported providers here: https://litellm.readthedocs.io/en/latest/advanced/)

**Example: Logs sent to Supabase**
<img width="1015" alt="Screenshot 2023-08-11 at 4 02 46 PM" src="https://github.com/ishaan-jaff/proxy-server/assets/29436595/237557b8-ba09-4917-982c-8f3e1b2c8d08">