mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 11:14:04 +00:00
docs(lago.md): add lago usage-based billing quick-start to docs
This commit is contained in:
parent
d43f75150a
commit
3acb31fa49
6 changed files with 121 additions and 6 deletions
114
docs/my-website/docs/observability/lago.md
Normal file
114
docs/my-website/docs/observability/lago.md
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
import Image from '@theme/IdealImage';
|
||||||
|
import Tabs from '@theme/Tabs';
|
||||||
|
import TabItem from '@theme/TabItem';
|
||||||
|
|
||||||
|
# Lago - Usage Based Billing
|
||||||
|
|
||||||
|
[Lago](https://www.getlago.com/) offers a self-hosted and cloud-based metering and usage-based billing solution.
|
||||||
|
|
||||||
|
<Image img={require('../../img/lago.jpeg')} />
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
Use just 1 line of code to instantly log your responses **across all providers** with Lago.
|
||||||
|
|
||||||
|
Get your Lago [API Key](https://docs.getlago.com/guide/self-hosted/docker#find-your-api-key)
|
||||||
|
|
||||||
|
```python
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
litellm.callbacks = ["lago"] # logs cost + usage of successful calls to lago
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
<Tabs>
|
||||||
|
<TabItem value="sdk" label="SDK">
|
||||||
|
|
||||||
|
```python
|
||||||
|
# pip install lago
|
||||||
|
import litellm
|
||||||
|
import os
|
||||||
|
|
||||||
|
os.environ["LAGO_API_BASE"] = "" # http://0.0.0.0:3000
|
||||||
|
os.environ["LAGO_API_KEY"] = ""
|
||||||
|
os.environ["LAGO_API_EVENT_CODE"] = "" # The billable metric's code - https://docs.getlago.com/guide/events/ingesting-usage#define-a-billable-metric
|
||||||
|
|
||||||
|
# LLM API Keys
|
||||||
|
os.environ['OPENAI_API_KEY']=""
|
||||||
|
|
||||||
|
# set lago as a callback, litellm will send the data to lago
|
||||||
|
litellm.success_callback = ["lago"]
|
||||||
|
|
||||||
|
# openai call
|
||||||
|
response = litellm.completion(
|
||||||
|
model="gpt-3.5-turbo",
|
||||||
|
messages=[
|
||||||
|
{"role": "user", "content": "Hi 👋 - i'm openai"}
|
||||||
|
]
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
</TabItem>
|
||||||
|
<TabItem value="proxy" label="PROXY">
|
||||||
|
|
||||||
|
1. Add to Config.yaml
|
||||||
|
```yaml
|
||||||
|
model_list:
|
||||||
|
- litellm_params:
|
||||||
|
api_base: https://openai-function-calling-workers.tasslexyz.workers.dev/
|
||||||
|
api_key: my-fake-key
|
||||||
|
model: openai/my-fake-model
|
||||||
|
model_name: fake-openai-endpoint
|
||||||
|
|
||||||
|
litellm_settings:
|
||||||
|
callbacks: ["lago"] # 👈 KEY CHANGE
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Start Proxy
|
||||||
|
|
||||||
|
```
|
||||||
|
litellm --config /path/to/config.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Test it!
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl --location 'http://0.0.0.0:4000/chat/completions' \
|
||||||
|
--header 'Content-Type: application/json' \
|
||||||
|
--data ' {
|
||||||
|
"model": "fake-openai-endpoint",
|
||||||
|
"messages": [
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "what llm are you"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
'
|
||||||
|
```
|
||||||
|
|
||||||
|
</TabItem>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
|
|
||||||
|
<Image img={require('../../img/lago_2.png')} />
|
||||||
|
|
||||||
|
## Advanced - Lago Logging object
|
||||||
|
|
||||||
|
This is what LiteLLM will log to Lago
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"event": {
|
||||||
|
"transaction_id": "<generated_unique_id>",
|
||||||
|
"external_customer_id": <litellm_end_user_id>, # passed via `user` param in /chat/completion call - https://platform.openai.com/docs/api-reference/chat/create
|
||||||
|
"code": os.getenv("LAGO_API_EVENT_CODE"),
|
||||||
|
"properties": {
|
||||||
|
"input_tokens": <number>,
|
||||||
|
"output_tokens": <number>,
|
||||||
|
"model": <string>,
|
||||||
|
"response_cost": <number>, # 👈 LITELLM CALCULATED RESPONSE COST - https://github.com/BerriAI/litellm/blob/d43f75150a65f91f60dc2c0c9462ce3ffc713c1f/litellm/utils.py#L1473
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
|
@ -20,7 +20,7 @@ Use just 2 lines of code, to instantly log your responses **across all providers
|
||||||
Get your OpenMeter API Key from https://openmeter.cloud/meters
|
Get your OpenMeter API Key from https://openmeter.cloud/meters
|
||||||
|
|
||||||
```python
|
```python
|
||||||
litellm.success_callback = ["openmeter"] # logs cost + usage of successful calls to openmeter
|
litellm.callbacks = ["openmeter"] # logs cost + usage of successful calls to openmeter
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -28,7 +28,7 @@ litellm.success_callback = ["openmeter"] # logs cost + usage of successful calls
|
||||||
<TabItem value="sdk" label="SDK">
|
<TabItem value="sdk" label="SDK">
|
||||||
|
|
||||||
```python
|
```python
|
||||||
# pip install langfuse
|
# pip install openmeter
|
||||||
import litellm
|
import litellm
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
@ -39,8 +39,8 @@ os.environ["OPENMETER_API_KEY"] = ""
|
||||||
# LLM API Keys
|
# LLM API Keys
|
||||||
os.environ['OPENAI_API_KEY']=""
|
os.environ['OPENAI_API_KEY']=""
|
||||||
|
|
||||||
# set langfuse as a callback, litellm will send the data to langfuse
|
# set openmeter as a callback, litellm will send the data to openmeter
|
||||||
litellm.success_callback = ["openmeter"]
|
litellm.callbacks = ["openmeter"]
|
||||||
|
|
||||||
# openai call
|
# openai call
|
||||||
response = litellm.completion(
|
response = litellm.completion(
|
||||||
|
@ -64,7 +64,7 @@ model_list:
|
||||||
model_name: fake-openai-endpoint
|
model_name: fake-openai-endpoint
|
||||||
|
|
||||||
litellm_settings:
|
litellm_settings:
|
||||||
success_callback: ["openmeter"] # 👈 KEY CHANGE
|
callbacks: ["openmeter"] # 👈 KEY CHANGE
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Start Proxy
|
2. Start Proxy
|
||||||
|
|
BIN
docs/my-website/img/lago.jpeg
Normal file
BIN
docs/my-website/img/lago.jpeg
Normal file
Binary file not shown.
After Width: | Height: | Size: 344 KiB |
BIN
docs/my-website/img/lago_2.png
Normal file
BIN
docs/my-website/img/lago_2.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 176 KiB |
|
@ -175,6 +175,7 @@ const sidebars = {
|
||||||
"observability/custom_callback",
|
"observability/custom_callback",
|
||||||
"observability/langfuse_integration",
|
"observability/langfuse_integration",
|
||||||
"observability/sentry",
|
"observability/sentry",
|
||||||
|
"observability/lago",
|
||||||
"observability/openmeter",
|
"observability/openmeter",
|
||||||
"observability/promptlayer_integration",
|
"observability/promptlayer_integration",
|
||||||
"observability/wandb_integration",
|
"observability/wandb_integration",
|
||||||
|
|
|
@ -27,7 +27,7 @@ input_callback: List[Union[str, Callable]] = []
|
||||||
success_callback: List[Union[str, Callable]] = []
|
success_callback: List[Union[str, Callable]] = []
|
||||||
failure_callback: List[Union[str, Callable]] = []
|
failure_callback: List[Union[str, Callable]] = []
|
||||||
service_callback: List[Union[str, Callable]] = []
|
service_callback: List[Union[str, Callable]] = []
|
||||||
_custom_logger_compatible_callbacks_literal = Literal["lago"]
|
_custom_logger_compatible_callbacks_literal = Literal["lago", "openmeter"]
|
||||||
callbacks: List[Union[Callable, _custom_logger_compatible_callbacks_literal]] = []
|
callbacks: List[Union[Callable, _custom_logger_compatible_callbacks_literal]] = []
|
||||||
_langfuse_default_tags: Optional[
|
_langfuse_default_tags: Optional[
|
||||||
List[
|
List[
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue