litellm-mirror/litellm/proxy/tests/test_openai_request.py

import openai

# Point the OpenAI client at the local LiteLLM proxy; the api_key can be any
# placeholder value since authentication is handled by the proxy itself.
client = openai.OpenAI(
    api_key="anything",
    base_url="http://0.0.0.0:8000"
)

# The request is routed to whichever model the proxy was started with (`litellm --model`).
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {
            "role": "user",
            "content": "this is a test request, write a short poem"
        }
    ]
)
print(response)
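
# --- Streaming sketch -------------------------------------------------------
# A minimal streaming variant against the same proxy, assuming it was started
# with a chat-capable model (e.g. `litellm --model gpt-3.5-turbo`); stream=True
# and the chunk/delta shape follow the standard OpenAI Python SDK, and the
# prompt below is just a placeholder for this test.
stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "this is a test request, write a short poem"}],
    stream=True,
)
for chunk in stream:
    # delta.content can be None on the first/last chunks, so fall back to ""
    print(chunk.choices[0].delta.content or "", end="")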