diff --git a/docs/my-website/docs/proxy/enterprise.md b/docs/my-website/docs/proxy/enterprise.md
index 449c2ea17..f92688ee3 100644
--- a/docs/my-website/docs/proxy/enterprise.md
+++ b/docs/my-website/docs/proxy/enterprise.md
@@ -115,7 +115,7 @@ client = openai.OpenAI(
base_url="http://0.0.0.0:4000"
)
-# request sent to model set on litellm proxy, `litellm --model`
+
response = client.chat.completions.create(
model="gpt-3.5-turbo",
messages = [
@@ -126,7 +126,7 @@ response = client.chat.completions.create(
],
extra_body={
"metadata": {
- "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]
+ "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"] # 👈 Key Change
}
}
)
@@ -135,6 +135,43 @@ print(response)
```
+
+
+
+Pass `metadata` with the `tags` you want as part of the request body
+
+```js
+const openai = require('openai');
+
+async function runOpenAI() {
+ const client = new openai.OpenAI({
+ apiKey: 'sk-1234',
+ baseURL: 'http://0.0.0.0:4000'
+ });
+
+ try {
+ const response = await client.chat.completions.create({
+ model: 'gpt-3.5-turbo',
+ messages: [
+ {
+ role: 'user',
+ content: "this is a test request, write a short poem"
+ },
+ ],
+      metadata: {
+        tags: ["model-anthropic-claude-v2.1", "app-ishaan-prod"] // 👈 Key Change
+      }
+ });
+ console.log(response);
+ } catch (error) {
+    console.log("Got an exception from the server:");
+ console.error(error);
+ }
+}
+
+// Call the asynchronous function
+runOpenAI();
+```
+
+
Pass `metadata` as part of the request body
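+
+For reference, here's a minimal sketch of the same request made with Python's `requests` library, passing `metadata` directly in the JSON body sent to the proxy's `/chat/completions` endpoint; the proxy URL and `sk-1234` key are reused from the examples above.
+
+```python
+import requests
+
+# Send the request straight to the LiteLLM proxy, with `metadata.tags`
+# included in the request body (same tags as the SDK examples above).
+response = requests.post(
+    "http://0.0.0.0:4000/chat/completions",
+    headers={"Authorization": "Bearer sk-1234"},
+    json={
+        "model": "gpt-3.5-turbo",
+        "messages": [
+            {"role": "user", "content": "this is a test request, write a short poem"}
+        ],
+        "metadata": {
+            "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]  # 👈 Key Change
+        },
+    },
+)
+print(response.json())
+```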