forked from phoenix/litellm-mirror
Create locustfile.py
parent 75e9bab41d
commit 07a6957cca
1 changed file with 28 additions and 0 deletions
.github/workflows/locustfile.py (vendored, new file, 28 additions)
@@ -0,0 +1,28 @@
from locust import HttpUser, task, between


class MyUser(HttpUser):
    wait_time = between(1, 5)

    @task
    def chat_completion(self):
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer sk-1234",
            # Include any additional headers you may need for authentication, etc.
        }

        # Customize the payload with "model" and "messages" keys
        payload = {
            "model": "fake-openai-endpoint",
            "messages": [
                {"role": "system", "content": "You are a chat bot."},
                {"role": "user", "content": "Hello, how are you?"},
            ],
            # Add more data as necessary
        }

        # Make a POST request to the "chat/completions" endpoint
        response = self.client.post("chat/completions", json=payload, headers=headers)

        # Print or log the response if needed
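To run this load test against a LiteLLM proxy, the file can be passed to the Locust CLI, for example: locust -f .github/workflows/locustfile.py --host http://localhost:4000 (the host URL here is an assumption, not part of the commit). If per-request validation is wanted later, one option is Locust's catch_response flag, which lets the task mark non-200 replies as failures explicitly. The following is a minimal sketch of that variant, not part of this change:

from locust import HttpUser, task, between


class MyUser(HttpUser):
    wait_time = between(1, 5)

    @task
    def chat_completion(self):
        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer sk-1234",  # same placeholder key as the committed file
        }
        payload = {
            "model": "fake-openai-endpoint",
            "messages": [
                {"role": "system", "content": "You are a chat bot."},
                {"role": "user", "content": "Hello, how are you?"},
            ],
        }
        # catch_response=True hands back a context manager so the task can
        # decide success/failure itself instead of relying on the status code alone
        with self.client.post(
            "chat/completions", json=payload, headers=headers, catch_response=True
        ) as response:
            if response.status_code != 200:
                response.failure(f"unexpected status {response.status_code}")
            else:
                response.success()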