docs - langsmith

This commit is contained in:
Ishaan Jaff 2024-07-22 15:13:11 -07:00
parent 4850819ea0
commit d3bced56bb
3 changed files with 49 additions and 2 deletions

View file

@@ -1,6 +1,6 @@
import Image from '@theme/IdealImage';
# Langsmith - Logging LLM Input/Output
# 🦜 Langsmith - Logging LLM Input/Output
:::tip

View file

@@ -1106,6 +1106,52 @@ environment_variables:
```
2. Start Proxy
```bash
litellm --config /path/to/config.yaml
```
3. Test it!
```bash
curl --location 'http://0.0.0.0:4000/chat/completions' \
--header 'Content-Type: application/json' \
--data '{
    "model": "fake-openai-endpoint",
    "messages": [
        {
            "role": "user",
            "content": "Hello, Claude gm!"
        }
    ]
}'
```
Expect to see your log on Langsmith
<Image img={require('../../img/langsmith_new.png')} />
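If you are calling LiteLLM from Python directly rather than through the proxy, the sketch below shows the equivalent SDK-side setup. It assumes the `litellm.success_callback` hook accepts `"langsmith"` and that the logger reads a `LANGSMITH_API_KEY` environment variable; the model name and key values are placeholders.
```python
# Minimal sketch: log one completion to Langsmith via the LiteLLM SDK.
# Assumes `pip install litellm`, a valid LANGSMITH_API_KEY, and an OPENAI_API_KEY
# for the example model below (all key values shown are placeholders).
import os
import litellm

os.environ["LANGSMITH_API_KEY"] = "ls__..."  # placeholder

# Send successful LLM calls to the Langsmith logger
litellm.success_callback = ["langsmith"]

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello, Claude gm!"}],
)
print(response.choices[0].message.content)
```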
## Logging LLM IO to Arize AI
1. Set `callbacks: ["arize"]` in litellm config.yaml
```yaml
model_list:
  - model_name: gpt-4
    litellm_params:
      model: openai/fake
      api_key: fake-key
      api_base: https://exampleopenaiendpoint-production.up.railway.app/
litellm_settings:
  callbacks: ["arize"]
environment_variables:
  ARIZE_SPACE_KEY: "d0*****"
  ARIZE_API_KEY: "141a****"
```
2. Start Proxy
```bash
litellm --config /path/to/config.yaml
```
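The same pattern applies SDK-side for Arize. The sketch below assumes the `litellm.callbacks` hook accepts `"arize"` (mirroring the `callbacks` setting in the config above) and that the `ARIZE_SPACE_KEY` / `ARIZE_API_KEY` environment variables are picked up; all key values are placeholders.
```python
# Minimal sketch: send LiteLLM traces to Arize from the Python SDK.
# Assumes `pip install litellm`, valid Arize credentials, and an OPENAI_API_KEY
# for the example model below (all key values shown are placeholders).
import os
import litellm

os.environ["ARIZE_SPACE_KEY"] = "d0*****"  # placeholder, as in the config above
os.environ["ARIZE_API_KEY"] = "141a****"   # placeholder

# Register the Arize callback for all requests
litellm.callbacks = ["arize"]

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello, Claude gm!"}],
)
print(response.choices[0].message.content)
```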

View file

@@ -192,6 +192,8 @@ const sidebars = {
items: [
"observability/langfuse_integration",
"observability/logfire_integration",
"observability/langsmith_integration",
"observability/arize_integration",
"debugging/local_debugging",
"observability/raw_request_response",
"observability/custom_callback",
@@ -202,7 +204,6 @@ const sidebars = {
"observability/openmeter",
"observability/promptlayer_integration",
"observability/wandb_integration",
"observability/langsmith_integration",
"observability/slack_integration",
"observability/traceloop_integration",
"observability/athina_integration",