diff --git a/Dockerfile b/Dockerfile
index 0bb60f06f0..2900691b9b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,4 +7,4 @@
 RUN pip install -r requirements.txt
 WORKDIR /app/litellm-proxy
 EXPOSE 8000
-CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
+CMD ["python3", "main.py"]
\ No newline at end of file
diff --git a/litellm-proxy/main.py b/litellm-proxy/main.py
index d66ce3b815..30b52d0a9a 100644
--- a/litellm-proxy/main.py
+++ b/litellm-proxy/main.py
@@ -81,3 +81,7 @@ async def home(request: Request):
 
 
 app.include_router(router)
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/litellm-proxy/tests/test_bedrock.py b/litellm-proxy/tests/test_bedrock.py
index 57816941cd..7f6bd5613d 100644
--- a/litellm-proxy/tests/test_bedrock.py
+++ b/litellm-proxy/tests/test_bedrock.py
@@ -1,5 +1,5 @@
 import openai
-openai.api_base = "http://127.0.0.1:8000"
+openai.api_base = "http://127.0.0.1:8000"
 
 print("making request")
 openai.api_key = "anything" # this gets passed as a header