litellm-mirror/litellm/proxy/proxy_load_test/locustfile.py

from locust import HttpUser, task, between


class MyUser(HttpUser):
    wait_time = between(1, 5)

    @task
    def chat_completion(self):
        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer sk-1234",
            # Include any additional headers you may need for authentication, etc.
        }

        # Customize the payload with "model" and "messages" keys
        payload = {
            "model": "gpt-3.5-turbo",
            "messages": [
                {"role": "system", "content": "You are a chat bot."},
                {"role": "user", "content": "Hello, how are you?"},
            ],
            # Add more data as necessary
        }

        # Make a POST request to the "chat/completions" endpoint
        response = self.client.post("chat/completions", json=payload, headers=headers)

        # Print or log the response if needed
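As written, the task records every request as a success even when the proxy returns an error. One way to surface proxy errors in the Locust statistics is Locust's catch_response flag, which hands back a response context manager so the task can mark non-200 responses as failures. The method below is a sketch of that approach, reusing the same endpoint, key, and payload shape as chat_completion; the method name is illustrative.

    @task
    def chat_completion_checked(self):
        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer sk-1234",
        }
        payload = {
            "model": "gpt-3.5-turbo",
            "messages": [{"role": "user", "content": "Hello, how are you?"}],
        }
        # catch_response=True defers the pass/fail decision to this block
        with self.client.post(
            "chat/completions", json=payload, headers=headers, catch_response=True
        ) as response:
            if response.status_code != 200:
                response.failure(f"Got status {response.status_code}")
            else:
                response.success()

To drive the test, point Locust at the proxy, for example: locust -f locustfile.py --host http://localhost:4000 (adjust the host and port to wherever the proxy is listening).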