diff --git a/docs/my-website/docs/proxy/logging.md b/docs/my-website/docs/proxy/logging.md
index c2e271224..577dba1c9 100644
--- a/docs/my-website/docs/proxy/logging.md
+++ b/docs/my-website/docs/proxy/logging.md
@@ -1,4 +1,7 @@
import Image from '@theme/IdealImage';
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
# Logging - Custom Callbacks, OpenTelemetry, Langfuse, Sentry
@@ -490,6 +493,89 @@ Expected output on Langfuse
+### Logging Metadata to Langfuse
+
+
+<Tabs>
+
+<TabItem value="Curl" label="Curl Request">
+
+Pass `metadata` as part of the request body
+
+```shell
+curl --location 'http://0.0.0.0:8000/chat/completions' \
+ --header 'Content-Type: application/json' \
+ --data '{
+ "model": "gpt-3.5-turbo",
+ "messages": [
+ {
+ "role": "user",
+ "content": "what llm are you"
+ }
+ ],
+ "metadata": {
+ "generation_name": "ishaan-test-generation",
+ "generation_id": "gen-id22",
+ "trace_id": "trace-id22",
+ "trace_user_id": "user-id2"
+ }
+}'
+```
+
+</TabItem>
+
+<TabItem value="openai" label="OpenAI Python SDK">
+
+```python
+import openai
+client = openai.OpenAI(
+ api_key="anything",
+ base_url="http://0.0.0.0:8000"
+)
+
+# request sent to the model set on the litellm proxy, `litellm --model`
+response = client.chat.completions.create(model="gpt-3.5-turbo", messages = [
+ {
+ "role": "user",
+ "content": "this is a test request, write a short poem"
+ }
+])
+
+print(response)
+
+```
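+
+The example above sends a plain request. To log the same metadata to Langfuse from this client, the fields also need to be in the request body. A minimal sketch, assuming the OpenAI Python SDK v1+, whose `extra_body` parameter merges extra fields into the JSON payload sent to the proxy:
+
+```python
+import openai
+
+client = openai.OpenAI(
+    api_key="anything",
+    base_url="http://0.0.0.0:8000"
+)
+
+# `extra_body` adds these fields to the request body, so the proxy
+# receives the same `metadata` object shown in the curl example
+response = client.chat.completions.create(
+    model="gpt-3.5-turbo",
+    messages=[
+        {
+            "role": "user",
+            "content": "this is a test request, write a short poem"
+        }
+    ],
+    extra_body={
+        "metadata": {
+            "generation_name": "ishaan-test-generation",
+            "generation_id": "gen-id22",
+            "trace_id": "trace-id22",
+            "trace_user_id": "user-id2"
+        }
+    }
+)
+
+print(response)
+```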
+
+</TabItem>
+
+<TabItem value="langchain" label="Langchain">
+
+```python
+from langchain.chat_models import ChatOpenAI
+from langchain.prompts.chat import (
+ ChatPromptTemplate,
+ HumanMessagePromptTemplate,
+ SystemMessagePromptTemplate,
+)
+from langchain.schema import HumanMessage, SystemMessage
+
+chat = ChatOpenAI(
+ openai_api_base="http://0.0.0.0:8000", # set openai_api_base to the LiteLLM Proxy
+ model = "gpt-3.5-turbo",
+ temperature=0.1
+)
+
+messages = [
+ SystemMessage(
+        content="You are a helpful assistant that I'm using to make a test request to."
+ ),
+ HumanMessage(
+ content="test from litellm. tell me why it's amazing in 1 sentence"
+ ),
+]
+response = chat(messages)
+
+print(response)
+```
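+
+As with the OpenAI client, metadata only reaches Langfuse if it ends up in the request body. A minimal sketch, assuming the legacy `langchain.chat_models.ChatOpenAI` client, whose `model_kwargs` entries are forwarded as additional request parameters (verify this against your Langchain and OpenAI SDK versions):
+
+```python
+from langchain.chat_models import ChatOpenAI
+from langchain.schema import HumanMessage
+
+# Assumption: entries in `model_kwargs` are sent as extra request
+# parameters, so `metadata` reaches the LiteLLM proxy's request body
+chat = ChatOpenAI(
+    openai_api_base="http://0.0.0.0:8000",
+    model="gpt-3.5-turbo",
+    temperature=0.1,
+    model_kwargs={
+        "metadata": {
+            "generation_name": "ishaan-test-generation",
+            "generation_id": "gen-id22",
+            "trace_id": "trace-id22",
+            "trace_user_id": "user-id2"
+        }
+    }
+)
+
+response = chat([HumanMessage(content="what llm are you")])
+print(response)
+```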
+
+</TabItem>
+</Tabs>
+
## Logging Proxy Input/Output - DynamoDB
We will use the `--config` to set