litellm-mirror/litellm/proxy/tests/test_openai_request.py

import openai

# client points at the locally running LiteLLM proxy; the proxy handles
# provider authentication, so any placeholder api_key value works here
client = openai.OpenAI(
    api_key="anything",
    base_url="http://0.0.0.0:8000"
)

# request is routed to the model configured on the litellm proxy, `litellm --model`
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {
            "role": "user",
            "content": "this is a test request, write a short poem"
        }
    ]
)

print(response)
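
# ---------------------------------------------------------------------------
# Optional streaming sketch (not part of the original test): a minimal
# variant assuming the proxy at base_url relays streamed responses for the
# configured model. stream=True is standard OpenAI SDK usage; each yielded
# chunk carries an incremental delta of the generated text.
# ---------------------------------------------------------------------------
stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    stream=True,
    messages=[
        {
            "role": "user",
            "content": "this is a test request, write a short poem"
        }
    ]
)
for chunk in stream:
    # print each partial piece of content as it arrives
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="")
print()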