This commit is contained in:
colegottdank 2024-07-08 13:56:53 -07:00
parent cb52c59481
commit a3d9e34b26
6 changed files with 29 additions and 13 deletions

View file

@ -120,6 +120,7 @@ from litellm import completion
## set env variables for logging tools
os.environ["LUNARY_PUBLIC_KEY"] = "your-lunary-public-key"
os.environ["HELICONE_API_KEY"] = "your-helicone-key"
os.environ["LANGFUSE_PUBLIC_KEY"] = ""
os.environ["LANGFUSE_SECRET_KEY"] = ""
os.environ["ATHINA_API_KEY"] = "your-athina-api-key"

View file

@ -87,6 +87,7 @@ from litellm import completion
## set env variables for logging tools
os.environ["LUNARY_PUBLIC_KEY"] = "your-lunary-public-key"
os.environ["HELICONE_API_KEY"] = "your-helicone-key"
os.environ["LANGFUSE_PUBLIC_KEY"] = ""
os.environ["LANGFUSE_SECRET_KEY"] = ""

View file

@ -310,6 +310,7 @@ LiteLLM exposes pre defined callbacks to send data to Lunary, Langfuse, Helicone
from litellm import completion
## set env variables for logging tools
os.environ["HELICONE_API_KEY"] = "your-helicone-key"
os.environ["LANGFUSE_PUBLIC_KEY"] = ""
os.environ["LANGFUSE_SECRET_KEY"] = ""
os.environ["LUNARY_PUBLIC_KEY"] = "your-lunary-public-key"

View file

@ -1,4 +1,4 @@
# 🧠 Helicone - OSS LLM Observability Platform
:::tip
@ -7,21 +7,24 @@ https://github.com/BerriAI/litellm
:::
[Helicone](https://helicone.ai/) is an open source observability platform that proxies your OpenAI traffic and provides you key insights into your spend, latency and usage.
## Use Helicone to log requests across all LLM Providers (OpenAI, Azure, Anthropic, Cohere, Replicate, PaLM)
liteLLM provides `success_callbacks` and `failure_callbacks`, making it easy for you to send data to a particular provider depending on the status of your responses.
In this case, we want to log requests to Helicone when a request succeeds.
### Approach 1: Use Callbacks
Use just 1 line of code, to instantly log your responses **across all providers** with helicone:
```python
litellm.success_callback=["helicone"]
```
Complete code
```python
from litellm import completion
@ -40,6 +43,7 @@ response = completion(model="command-nightly", messages=[{"role": "user", "conte
```
### Approach 2: [OpenAI + Azure only] Use Helicone as a proxy
Helicone provides advanced functionality like caching, etc. Helicone currently supports this for Azure and OpenAI.
If you want to use Helicone to proxy your OpenAI/Azure requests, then you can -
@ -48,6 +52,7 @@ If you want to use Helicone to proxy your OpenAI/Azure requests, then you can -
- Pass in helicone request headers via: `litellm.headers`
Complete Code
```python
import litellm
from litellm import completion
@ -62,3 +67,10 @@ response = litellm.completion(
print(response)
```
### Group and visualize multi-step LLM interactions.
Track request flows across multiple traces and gain insights into complex AI workflows by adding only 2 simple headers.
- `Helicone-Session-Id` - The session id you want to track
- `Helicone-Session-Path` - The path of the session

View file

@ -186,6 +186,7 @@ const sidebars = {
type: "category",
label: "Logging & Observability",
items: [
"observability/helicone_integration",
"observability/langfuse_integration",
"observability/logfire_integration",
"debugging/local_debugging",
@ -202,7 +203,6 @@ const sidebars = {
"observability/athina_integration",
"observability/lunary_integration",
"observability/greenscale_integration",
"observability/helicone_integration",
"observability/supabase_integration",
`observability/telemetry`,
],

View file

@ -304,6 +304,7 @@ LiteLLM exposes pre defined callbacks to send data to Lunary, Langfuse, Helicone
from litellm import completion
## set env variables for logging tools
os.environ["HELICONE_API_KEY"] = "your-helicone-key"
os.environ["LANGFUSE_PUBLIC_KEY"] = ""
os.environ["LANGFUSE_SECRET_KEY"] = ""
os.environ["LUNARY_PUBLIC_KEY"] = "your-lunary-public-key"