diff --git a/litellm/proxy/proxy_load_test/locustfile.py b/litellm/proxy/proxy_load_test/locustfile.py
index 2cd2e2fcce..f57ae9208f 100644
--- a/litellm/proxy/proxy_load_test/locustfile.py
+++ b/litellm/proxy/proxy_load_test/locustfile.py
@@ -8,6 +8,7 @@ class MyUser(HttpUser):
     def chat_completion(self):
         headers = {
             "Content-Type": "application/json",
+            "Authorization": "Bearer sk-1234",
             # Include any additional headers you may need for authentication, etc.
         }
diff --git a/litellm/proxy/proxy_load_test/openai_endpoint.py b/litellm/proxy/proxy_load_test/openai_endpoint.py
index b3291ce709..3394b9c6fe 100644
--- a/litellm/proxy/proxy_load_test/openai_endpoint.py
+++ b/litellm/proxy/proxy_load_test/openai_endpoint.py
@@ -6,6 +6,7 @@ from fastapi import FastAPI, Request, status, HTTPException, Depends
 from fastapi.responses import StreamingResponse
 from fastapi.security import OAuth2PasswordBearer
 from fastapi.middleware.cors import CORSMiddleware
+import uuid

 app = FastAPI()

@@ -23,7 +24,7 @@ app.add_middleware(
 @app.post("/v1/chat/completions")
 async def completion(request: Request):
     return {
-        "id": "chatcmpl-123",
+        "id": f"chatcmpl-{uuid.uuid4().hex}",
         "object": "chat.completion",
         "created": 1677652288,
         "model": "gpt-3.5-turbo-0125",