Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 03:34:10 +00:00)
fix re-add virtual key auth checks on vertex ai pass thru endpoints (#5827)
parent e4f309d0e7
commit 16b0d38c11
3 changed files with 49 additions and 3 deletions
@@ -3221,3 +3221,31 @@ async def test_key_list_unsupported_params(prisma_client):
         error_str = str(e.message)
         assert "Unsupported parameter" in error_str
         pass
+
+
+@pytest.mark.asyncio
+async def test_auth_vertex_ai_route(prisma_client):
+    """
+    If user is premium user and vertex-ai route is used. Assert Virtual Key checks are run
+    """
+    litellm.set_verbose = True
+    setattr(litellm.proxy.proxy_server, "prisma_client", prisma_client)
+    setattr(litellm.proxy.proxy_server, "premium_user", True)
+    setattr(litellm.proxy.proxy_server, "master_key", "sk-1234")
+    await litellm.proxy.proxy_server.prisma_client.connect()
+
+    route = "/vertex-ai/publishers/google/models/gemini-1.5-flash-001:generateContent"
+    request = Request(scope={"type": "http"})
+    request._url = URL(url=route)
+    request._headers = {"Authorization": "Bearer sk-12345"}
+    try:
+        await user_api_key_auth(request=request, api_key="Bearer " + "sk-12345")
+        pytest.fail("Expected this call to fail. User is over limit.")
+    except Exception as e:
+        print(vars(e))
+        print("error str=", str(e.message))
+        error_str = str(e.message)
+        assert e.code == "401"
+        assert "Invalid proxy server token passed" in error_str
+        pass
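For context, the test above expects user_api_key_auth to run on the /vertex-ai/* pass-through route and reject an unknown virtual key with a 401. Below is a minimal sketch of how such a check is typically wired in, assuming a FastAPI pass-through router and the litellm.proxy.auth.user_api_key_auth import path; the route pattern, handler name, and forwarding step are illustrative, not the actual litellm implementation.

# A minimal sketch, not the actual litellm router code: the point of the fix
# is that the vertex-ai pass-through endpoint declares user_api_key_auth as a
# FastAPI dependency, so every request must present a valid virtual key before
# anything is forwarded to Vertex AI.
from fastapi import APIRouter, Depends, Request

# assumption: this is where user_api_key_auth lives in the proxy package
from litellm.proxy.auth.user_api_key_auth import user_api_key_auth

router = APIRouter()


@router.post("/vertex-ai/{endpoint:path}")  # illustrative route pattern
async def vertex_proxy_route(
    endpoint: str,
    request: Request,
    # Re-adding this dependency is the substance of the fix: without it the
    # pass-through route skipped virtual key checks entirely, which is what
    # test_auth_vertex_ai_route guards against.
    user_api_key_dict=Depends(user_api_key_auth),
):
    # Hypothetical forwarding step; the real implementation streams the request
    # body through to the upstream Vertex AI endpoint.
    return {"forwarded_to": endpoint}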