mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-24 18:24:20 +00:00
(fix) proxy server dockerfile
This commit is contained in:
parent
d9cc9fe566
commit
a4f4af1df3
3 changed files with 6 additions and 2 deletions
|
@@ -7,4 +7,4 @@ RUN pip install -r requirements.txt
 WORKDIR /app/litellm-proxy
 EXPOSE 8000
-CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
+CMD ["python3", "main.py"]
|
@@ -81,3 +81,7 @@ async def home(request: Request):
 
 
 app.include_router(router)
+
+if __name__ == "__main__":
+    import uvicorn
+    uvicorn.run(app, host="0.0.0.0", port=8000)
|
@@ -1,5 +1,5 @@
 import openai
-openai.api_base = "http://127.0.0.1:8000"
+openai.api_base = "http://0.0.0.0:8000"
 print("making request")
 openai.api_key = "anything" # this gets passed as a header
 
|
Loading…
Add table
Add a link
Reference in a new issue