forked from phoenix/litellm-mirror
add langfuse to docs
This commit is contained in:
parent
f466c443fe
commit
01d4a4bb0b
4 changed files with 44 additions and 0 deletions
|
@ -4,6 +4,7 @@
|
|||
| ----------- | -------------------------------------------------------- | ---------------------------------------- |
|
||||
| Promptlayer | `PROMPTLAYER_API_KEY` | `litellm.success_callback=["promptlayer"]` |
|
||||
| LLMonitor | `LLMONITOR_APP_ID` | `litellm.success_callback=["llmonitor"]` |
|
||||
| LangFuse | `LANGFUSE_PUBLIC_KEY`, `LANGFUSE_SECRET_KEY` | `litellm.success_callback=["langfuse"]` |
|
||||
| Sentry | `SENTRY_API_URL` | `litellm.success_callback=["sentry"]` |
|
||||
| Posthog | `POSTHOG_API_KEY`,`POSTHOG_API_URL` | `litellm.success_callback=["posthog"]` |
|
||||
| Slack | `SLACK_API_TOKEN`,`SLACK_API_SECRET`,`SLACK_API_CHANNEL` | `litellm.success_callback=["slack"]` |
|
||||
|
|
42
docs/my-website/docs/observability/langfuse_integration.md
Normal file
42
docs/my-website/docs/observability/langfuse_integration.md
Normal file
|
@ -0,0 +1,42 @@
|
|||
# LangFuse Tutorial
|
||||
|
||||
LangFuse is open-source observability & analytics for LLM apps
|
||||
Detailed production traces and a granular view of quality, cost, and latency
|
||||
|
||||
<Image img={require('../../img/langfuse.gif')} />
|
||||
|
||||
## Use Langfuse to log requests across all LLM Providers (OpenAI, Azure, Anthropic, Cohere, Replicate, PaLM)
|
||||
|
||||
liteLLM provides `callbacks`, making it easy for you to log data depending on the status of your responses.
|
||||
|
||||
### Using Callbacks
|
||||
|
||||
Get your Langfuse API Keys from https://cloud.langfuse.com/
|
||||
|
||||
Use just 2 lines of code, to instantly log your responses **across all providers** with langfuse:
|
||||
|
||||
```python
|
||||
litellm.success_callback = ["langfuse"]
|
||||
|
||||
```
|
||||
|
||||
Complete code
|
||||
|
||||
```python
|
||||
from litellm import completion
|
||||
|
||||
## set env variables
|
||||
os.environ["LANGFUSE_PUBLIC_KEY"] = "your key"
|
||||
os.environ["LANGFUSE_SECRET_KEY"] = "your key"
|
||||
|
||||
os.environ["OPENAI_API_KEY"], os.environ["COHERE_API_KEY"] = "", ""
|
||||
|
||||
# set callbacks
|
||||
litellm.success_callback = ["langfuse"]
|
||||
|
||||
#openai call
|
||||
response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}])
|
||||
|
||||
#cohere call
|
||||
response = completion(model="command-nightly", messages=[{"role": "user", "content": "Hi 👋 - i'm cohere"}])
|
||||
```
|
BIN
docs/my-website/img/langfuse.gif
Normal file
BIN
docs/my-website/img/langfuse.gif
Normal file
Binary file not shown.
After Width: | Height: | Size: 8.5 MiB |
|
@ -65,6 +65,7 @@ const sidebars = {
|
|||
"observability/callbacks",
|
||||
"observability/integrations",
|
||||
"observability/promptlayer_integration",
|
||||
"observability/langfuse_integration",
|
||||
"observability/traceloop_integration",
|
||||
"observability/llmonitor_integration",
|
||||
"observability/helicone_integration",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue