docs - show how to use with azure openai

This commit is contained in:
Ishaan Jaff 2024-07-09 18:33:22 -07:00
parent 3a06e2e425
commit de13d06ce6
2 changed files with 34 additions and 1 deletions

View file

@@ -48,6 +48,39 @@ response = client.chat.completions.create(
    }
)
print(response)
```
</TabItem>
<TabItem value="azureopenai" label="AzureOpenAI Python">
Set `extra_body={"metadata": { }}` to the `metadata` you want to pass with the request
```python
import openai
client = openai.AzureOpenAI(
api_key="anything",
base_url="http://0.0.0.0:4000"
)
# request sent to model set on litellm proxy, `litellm --model`
response = client.chat.completions.create(
model="gpt-3.5-turbo",
messages = [
{
"role": "user",
"content": "this is a test request, write a short poem"
}
],
extra_body={ # pass in any provider-specific param, if not supported by openai, https://docs.litellm.ai/docs/completion/input#provider-specific-params
"metadata": { # 👈 use for logging additional params (e.g. to langfuse)
"generation_name": "ishaan-generation-openai-client",
"generation_id": "openai-client-gen-id22",
"trace_id": "openai-client-trace-id22",
"trace_user_id": "openai-client-user-id2"
}
}
)
print(response)
```
</TabItem>

View file

@@ -37,6 +37,7 @@ const sidebars = {
href: "https://litellm-api.up.railway.app/",
},
"proxy/enterprise",
"proxy/user_keys",
"proxy/demo",
"proxy/configs",
"proxy/reliability",
@@ -46,7 +47,6 @@ const sidebars = {
"proxy/team_budgets",
"proxy/customers",
"proxy/billing",
"proxy/user_keys",
"proxy/virtual_keys",
"proxy/guardrails",
"proxy/token_auth",