docs update

ishaan-jaff 2023-09-09 19:00:04 -07:00
parent ad5668dbdf
commit cf3596a8b9
2 changed files with 39 additions and 0 deletions

@@ -6,6 +6,7 @@ liteLLM provides `success_callbacks` and `failure_callbacks`, making it easy for
liteLLM supports:
- [Custom Callback Functions](https://docs.litellm.ai/docs/observability/custom_callback)
- [LLMonitor](https://llmonitor.com/docs)
- [Helicone](https://docs.helicone.ai/introduction)
- [Traceloop](https://traceloop.com/docs)
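
For the hosted integrations above, a callback can usually be enabled by name rather than by writing a function. A minimal sketch, assuming `litellm.success_callback` accepts the integration names as strings and that the environment variable names below are the ones each integration expects (verify both against the linked docs):

```python
import os
import litellm
from litellm import completion

# assumed environment variable names - check each integration's docs
os.environ["LLMONITOR_APP_ID"] = "your-llmonitor-app-id"   # placeholder
os.environ["HELICONE_API_KEY"] = "your-helicone-api-key"   # placeholder

# enable the hosted integrations by name (assumed string form)
litellm.success_callback = ["llmonitor", "helicone"]

response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}]
)
print(response)
```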

@@ -10,6 +10,11 @@ def custom_callback(
    start_time, end_time  # start/end time
):
    # Your custom code here
    print("LITELLM: in custom callback function")
    print("kwargs", kwargs)
    print("completion_response", completion_response)
    print("start_time", start_time)
    print("end_time", end_time)
```
## Using Your Custom Callback Function
@@ -35,6 +40,39 @@ print(response)
```
### Custom Callback to write to Mixpanel
```python
from mixpanel import Mixpanel

import litellm
from litellm import completion

# The Mixpanel SDK is initialized with a project token
# (placeholder value - replace with your own token)
mp = Mixpanel("YOUR_MIXPANEL_PROJECT_TOKEN")

def custom_callback(
    kwargs,               # kwargs to completion
    completion_response,  # response from completion
    start_time, end_time  # start/end time
):
    # Your custom code here
    # "litellm-user" is a placeholder distinct_id for the tracked event
    mp.track("litellm-user", "LLM Response", {"llm_response": str(completion_response)})

# Assign the custom callback function
litellm.success_callback = [custom_callback]

response = completion(
    model="gpt-3.5-turbo",
    messages=[
        {
            "role": "user",
            "content": "Hi 👋 - i'm openai"
        }
    ]
)

print(response)
```
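
The same function can also be attached to error handling; a minimal sketch, assuming `litellm.failure_callback` accepts the same function signature as `litellm.success_callback`:

```python
import litellm

# track failed completion calls with the same handler
# (assumption: failure callbacks receive the same arguments)
litellm.failure_callback = [custom_callback]
```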