mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 03:04:13 +00:00
build(litellm_server/main.py): add support for proxying azure endpoint
This commit is contained in:
parent
914067227a
commit
438b285451
1 changed file with 4 additions and 1 deletion
|
@@ -94,10 +94,13 @@ async def embedding(request: Request):
|
||||||
|
|
||||||
@router.post("/v1/chat/completions")
|
@router.post("/v1/chat/completions")
|
||||||
@router.post("/chat/completions")
|
@router.post("/chat/completions")
|
||||||
async def chat_completion(request: Request):
|
@router.post("/openai/deployments/{model}/chat/completions") # azure compatible endpoint
|
||||||
|
async def chat_completion(request: Request, model: Optional[str] = None):
|
||||||
global llm_model_list
|
global llm_model_list
|
||||||
try:
|
try:
|
||||||
data = await request.json()
|
data = await request.json()
|
||||||
|
if model:
|
||||||
|
data["model"] = model
|
||||||
## CHECK KEYS ##
|
## CHECK KEYS ##
|
||||||
# default to always using the "ENV" variables, only if AUTH_STRATEGY==DYNAMIC then reads headers
|
# default to always using the "ENV" variables, only if AUTH_STRATEGY==DYNAMIC then reads headers
|
||||||
env_validation = litellm.validate_environment(model=data["model"])
|
env_validation = litellm.validate_environment(model=data["model"])
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue