mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 03:04:13 +00:00
[BETA] Support OIDC role-based access to proxy (#8260)
* feat(proxy/_types.py): add new JWT field params — allows users + services to auth into the proxy
* feat(handle_jwt.py): allow team-role proxy access — allows the proxy admin to set allowed team roles
* fix(proxy/_types.py): add 'routes' to role-based permissions — allows the proxy admin to easily restrict which routes a team can access
* feat(handle_jwt.py): support more flexible role-based route access — v2 of role-based 'allowed_routes'
* test(test_jwt.py): add unit test for RBAC on proxy routes
* feat(handle_jwt.py): ensure cost tracking always works for any JWT request with `enforce_rbac=True`
* docs(token_auth.md): add documentation on controlling model access via OIDC roles
* test: increase time delay before retrying
* test: handle model-overloaded error in test
This commit is contained in:
parent
7f06b88192
commit
4e34fc3bf8
10 changed files with 413 additions and 143 deletions
|
@ -468,7 +468,7 @@ class BaseLLMChatTest(ABC):
|
|||
"https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
|
||||
],
|
||||
)
|
||||
@pytest.mark.flaky(retries=4, delay=1)
|
||||
@pytest.mark.flaky(retries=4, delay=2)
|
||||
def test_image_url(self, detail, image_url):
|
||||
litellm.set_verbose = True
|
||||
from litellm.utils import supports_vision
|
||||
|
@ -515,9 +515,13 @@ class BaseLLMChatTest(ABC):
|
|||
],
|
||||
}
|
||||
]
|
||||
response = self.completion_function(
|
||||
**base_completion_call_args, messages=messages
|
||||
)
|
||||
try:
|
||||
response = self.completion_function(
|
||||
**base_completion_call_args, messages=messages
|
||||
)
|
||||
except litellm.InternalServerError:
|
||||
pytest.skip("Model is overloaded")
|
||||
|
||||
assert response is not None
|
||||
|
||||
@pytest.mark.flaky(retries=4, delay=1)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue