docs(quick_start.md): add example of using proxy with litellm sdk

Krrish Dholakia 2024-04-20 08:47:22 -07:00
parent 200f58c1e7
commit 2afdf34a9f


@@ -283,6 +283,27 @@ response = client.chat.completions.create(model="gpt-3.5-turbo", messages = [
print(response)
```
</TabItem>
<TabItem value="litellm" label="LiteLLM SDK">
```python
from litellm import completion

response = completion(
    model="openai/gpt-3.5-turbo",   # "openai/" prefix = call base_url as an OpenAI-compatible endpoint
    messages=[
        {
            "role": "user",
            "content": "this is a test request, write a short poem"
        }
    ],
    api_key="anything",              # dummy key; the proxy holds the real provider credentials
    base_url="http://0.0.0.0:4000"   # point the SDK at the local LiteLLM proxy
)

print(response)
```
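
The same call can also stream tokens as they arrive. A minimal sketch, assuming the proxy from the example above is still listening on `http://0.0.0.0:4000` (`stream=True` is a standard `litellm.completion` parameter):

```python
from litellm import completion

# identical request, but consume the response chunk by chunk
response = completion(
    model="openai/gpt-3.5-turbo",
    messages=[{"role": "user", "content": "this is a test request, write a short poem"}],
    api_key="anything",
    base_url="http://0.0.0.0:4000",
    stream=True,
)

for chunk in response:
    # chunks follow the OpenAI streaming shape; content is None on the final chunk
    print(chunk.choices[0].delta.content or "", end="")
```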
</TabItem>
<TabItem value="langchain" label="Langchain"> <TabItem value="langchain" label="Langchain">