fix test_master_key_hashing

Ishaan Jaff 2024-08-21 17:56:09 -07:00
parent 0ea1f367d7
commit f0f1d50734
2 changed files with 9 additions and 3 deletions


@@ -266,7 +266,7 @@ def management_endpoint_wrapper(func):
             )
             _http_request: Request = kwargs.get("http_request")
-            parent_otel_span = user_api_key_dict.parent_otel_span
+            parent_otel_span = getattr(user_api_key_dict, "parent_otel_span", None)
             if parent_otel_span is not None:
                 from litellm.proxy.proxy_server import open_telemetry_logger
@@ -310,7 +310,7 @@ def management_endpoint_wrapper(func):
             user_api_key_dict: UserAPIKeyAuth = (
                 kwargs.get("user_api_key_dict") or UserAPIKeyAuth()
             )
-            parent_otel_span = user_api_key_dict.parent_otel_span
+            parent_otel_span = getattr(user_api_key_dict, "parent_otel_span", None)
             if parent_otel_span is not None:
                 from litellm.proxy.proxy_server import open_telemetry_logger


@@ -2328,6 +2328,11 @@ async def test_master_key_hashing(prisma_client):
     from litellm.proxy.proxy_server import user_api_key_cache

     _team_id = "ishaans-special-team_{}".format(uuid.uuid4())
+    user_api_key_dict = UserAPIKeyAuth(
+        user_role=LitellmUserRoles.PROXY_ADMIN,
+        api_key="sk-1234",
+        user_id="1234",
+    )
     await new_team(
         NewTeamRequest(team_id=_team_id),
         user_api_key_dict=UserAPIKeyAuth(
@@ -2343,7 +2348,8 @@ async def test_master_key_hashing(prisma_client):
             models=["azure-gpt-3.5"],
             team_id=_team_id,
             tpm_limit=20,
-        )
+        ),
+        user_api_key_dict=user_api_key_dict,
     )
     print(_response)
     assert _response.models == ["azure-gpt-3.5"]
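For reference, the test change builds one proxy-admin UserAPIKeyAuth up front and passes it explicitly to the management call in the last hunk, rather than relying on the wrapper's UserAPIKeyAuth() fallback. A minimal sketch of that shape; the import path is an assumption based on the litellm proxy layout, and inside the test these names are already in scope:

    from litellm.proxy._types import LitellmUserRoles, UserAPIKeyAuth

    user_api_key_dict = UserAPIKeyAuth(
        user_role=LitellmUserRoles.PROXY_ADMIN,
        api_key="sk-1234",
        user_id="1234",
    )

    # then forwarded to the call shown in the last hunk:
    #     ..., user_api_key_dict=user_api_key_dict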