forked from phoenix/litellm-mirror

docs - using tags OpenAI JS

commit b9fab4bd4d
parent 305e884174

1 changed file with 39 additions and 2 deletions
@@ -115,7 +115,7 @@ client = openai.OpenAI(
     base_url="http://0.0.0.0:4000"
 )
 
 # request sent to model set on litellm proxy, `litellm --model`
 response = client.chat.completions.create(
     model="gpt-3.5-turbo",
     messages = [
@@ -126,7 +126,7 @@ response = client.chat.completions.create(
     ],
     extra_body={
         "metadata": {
-            "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]
+            "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"] # 👈 Key Change
         }
     }
 )
@@ -135,6 +135,43 @@ print(response)
 ```
 </TabItem>
 
+
+<TabItem value="openai js" label="OpenAI JS">
+
+Pass `tags` as part of the request body
+
+```js
+const openai = require('openai');
+
+async function runOpenAI() {
+  const client = new openai.OpenAI({
+    apiKey: 'sk-1234',
+    baseURL: 'http://0.0.0.0:4000'
+  });
+
+  try {
+    const response = await client.chat.completions.create({
+      model: 'gpt-3.5-turbo',
+      messages: [
+        {
+          role: 'user',
+          content: "this is a test request, write a short poem"
+        },
+      ],
+      tags: ["model-anthropic-claude-v2.1", "app-ishaan-prod"] // 👈 Key Change
+    });
+    console.log(response);
+  } catch (error) {
+    console.log("got this exception from server");
+    console.error(error);
+  }
+}
+
+// Call the asynchronous function
+runOpenAI();
+```
+</TabItem>
+
 <TabItem value="Curl" label="Curl Request">
 
 Pass `metadata` as part of the request body
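The Curl tab's request body is cut off in this hunk. A minimal sketch of such a request, assuming the same `/chat/completions` proxy endpoint, placeholder `sk-1234` key, and `metadata.tags` payload as the Python tab:

```shell
# tags are passed inside "metadata" in the JSON body; the key and URL are placeholders
curl -X POST 'http://0.0.0.0:4000/chat/completions' \
  -H 'Content-Type: application/json' \
  -H 'Authorization: Bearer sk-1234' \
  -d '{
    "model": "gpt-3.5-turbo",
    "messages": [
      {"role": "user", "content": "this is a test request, write a short poem"}
    ],
    "metadata": {"tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]}
  }'
```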