Allow non-admins to use OpenAI routes
The problem we were having was that non-admin users trying to use `/engines/{model}/chat/completions` were getting an HTTP 401 error.

```shell
$ curl -sSL 'http://localhost:4000/engines/gpt-35-turbo-0125/chat/completions' \
    --header "Authorization: Bearer ${LITELLM_KEY}" \
    --header 'Content-Type: application/json' \
    --data '{
      "model": "gpt-35-turbo-0125",
      "messages": [
        {
          "role": "user",
          "content": "Write a poem about LiteLLM"
        }
      ]
    }' \
    | jq '.'
{
  "error": {
    "message": "Authentication Error, Only proxy admin can be used to generate, delete, update info for new keys/users/teams. Route=/engines/gpt-35-turbo-0125/chat/completions. Your role=unknown. Your user_id=someone@company.com",
    "type": "auth_error",
    "param": "None",
    "code": 401
  }
}
```

This seems to be related to code in `user_api_key_auth` that checks the request URL against a list of routes allowed for non-admin users, where that list is `LiteLLMRoutes.openai_routes.value`. The problem is that the route `/engines/{model}/chat/completions` is not in that list, and adding it would not help anyway, because the comparison is done against `request.url.path`, which contains the actual model name (e.g. `gpt-35-turbo-0125`) rather than the `{model}` placeholder.

I added a new list, `LiteLLMRoutes.openai_route_names`, containing the route **names**, and then added a check in `user_api_key_auth` to see if the route name is in that list.
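To make the root cause concrete, here is a minimal standalone sketch (not LiteLLM code; the paths are taken from the example above) of why a literal comparison of `request.url.path` against the templated entries in `openai_routes` can never match:

```python
# Minimal sketch (not LiteLLM code): the incoming path carries the concrete
# model name, so comparing it literally against the {model} route templates
# always fails, and the request falls through to the 401 branch.
incoming_path = "/engines/gpt-35-turbo-0125/chat/completions"
openai_routes = [
    "/openai/deployments/{model}/chat/completions",
    "/engines/{model}/chat/completions",
]

print(incoming_path in openai_routes)  # False
```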
This commit is contained in:
parent 6d8ea641ec
commit ddf77656fb

2 changed files with 12 additions and 0 deletions
```diff
@@ -77,6 +77,16 @@ class LiteLLM_UpperboundKeyGenerateParams(LiteLLMBase):
 
 
 class LiteLLMRoutes(enum.Enum):
+    openai_route_names: List = [
+        "chat_completion",
+        "completion",
+        "embeddings",
+        "image_generation",
+        "audio_transcriptions",
+        "moderations",
+        "model_info_v1",
+        "model_info_v2",
+    ]
     openai_routes: List = [
         # chat completions
         "/openai/deployments/{model}/chat/completions",
```
```diff
@@ -1073,6 +1073,8 @@ async def user_api_key_auth(
         if not _is_user_proxy_admin(user_id_information):  # if non-admin
             if route in LiteLLMRoutes.openai_routes.value:
                 pass
+            elif request['route'].name in LiteLLMRoutes.openai_route_names.value:
+                pass
             elif (
                 route in LiteLLMRoutes.info_routes.value
             ):  # check if user allowed to call an info route
```
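For context, a minimal sketch of why matching on the route name works where path matching did not, assuming FastAPI/Starlette semantics; the endpoint and allow-list below are illustrative stand-ins, not LiteLLM's actual code. In a FastAPI app the matched route object is exposed on the request scope, and its `name` (by default the endpoint function's name) stays constant no matter which concrete model appears in the URL.

```python
# Minimal sketch, assuming FastAPI; the endpoint and allow-list are
# illustrative stand-ins, not LiteLLM's actual code.
from fastapi import FastAPI, Request

app = FastAPI()

# Stand-in for LiteLLMRoutes.openai_route_names.value
OPENAI_ROUTE_NAMES = ["chat_completion", "completion", "embeddings"]

@app.post("/engines/{model}/chat/completions")
async def chat_completion(model: str, request: Request):
    # Once routing has run, the matched route is available on the request
    # scope; its name is "chat_completion" here, regardless of the concrete
    # model in request.url.path.
    route_name = request["route"].name
    allowed = route_name in OPENAI_ROUTE_NAMES  # True for any {model} value
    return {"path": request.url.path, "route_name": route_name, "allowed": allowed}
```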