litellm/tests/local_testing/test_auth_utils.py
Krish Dholakia f252350881
LiteLLM Minor Fixes & Improvements (10/17/2024) (#6293)
* fix(ui_sso.py): fix faulty admin only check

Fixes https://github.com/BerriAI/litellm/issues/6286

* refactor(sso_helper_utils.py): refactor /sso/callback to use helper utils, covered by unit testing

Prevent future regressions

* feat(prompt_factory): support 'ensure_alternating_roles' param

Closes https://github.com/BerriAI/litellm/issues/6257

* fix(proxy/utils.py): add dailytagspend to expected views

* feat(auth_utils.py): support setting regex for clientside auth credentials

Fixes https://github.com/BerriAI/litellm/issues/6203

* build(cookbook): add tutorial for mlflow + langchain + litellm proxy tracing

* feat(argilla.py): add argilla logging integration

Closes https://github.com/BerriAI/litellm/issues/6201

* fix: fix linting errors

* fix: fix ruff error

* test: fix test

* fix: update vertex ai assumption - parts not always guaranteed (#6296)

* docs(configs.md): add argilla env var to docs
2024-10-17 22:09:11 -07:00

70 lines
2 KiB
Python

# What is this?
## Tests if proxy/auth/auth_utils.py works as expected
import asyncio
import os
import random
import sys
import time
import traceback
import uuid

from dotenv import load_dotenv

load_dotenv()
sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path so `litellm` resolves from the repo checkout
import pytest

import litellm
from litellm.proxy.auth.auth_utils import (
    _allow_model_level_clientside_configurable_parameters,
)
from litellm.router import Router
@pytest.mark.parametrize(
    "allowed_param, input_value, should_return_true",
    [
        # Bare string entry: any client-supplied value for that param is allowed.
        ("api_base", {"api_base": "http://dummy.com"}, True),
        (
            {"api_base": "https://api.openai.com/v1"},
            {"api_base": "https://api.openai.com/v1"},
            True,
        ),  # exact string match -> should return True
        (
            {"api_base": "https://api.openai.com/v1"},
            {"api_base": "https://api.anthropic.com/v1"},
            False,
        ),  # value differs from allowed entry -> should return False
        (
            # Raw strings so `\.` is a regex escaped-dot, not an invalid
            # (deprecated) Python string escape sequence.
            {"api_base": r"^https://litellm.*direct\.fireworks\.ai/v1$"},
            {"api_base": "https://litellm-dev.direct.fireworks.ai/v1"},
            True,
        ),  # regex matches -> should return True
        (
            {"api_base": r"^https://litellm.*novice\.fireworks\.ai/v1$"},
            {"api_base": "https://litellm-dev.direct.fireworks.ai/v1"},
            False,
        ),  # regex does not match -> should return False
    ],
)
def test_configurable_clientside_parameters(
    allowed_param, input_value, should_return_true
):
    """Check that clientside-configurable auth params honor the allow-list.

    Builds a one-model router whose `configurable_clientside_auth_params`
    contains a single entry (bare string, exact-match dict, or regex dict)
    and asserts `_allow_model_level_clientside_configurable_parameters`
    accepts/rejects the request-body value accordingly.
    """
    router = Router(
        model_list=[
            {
                "model_name": "dummy-model",
                "litellm_params": {
                    "model": "gpt-3.5-turbo",
                    "api_key": "dummy-key",
                    "configurable_clientside_auth_params": [allowed_param],
                },
            }
        ]
    )
    resp = _allow_model_level_clientside_configurable_parameters(
        model="dummy-model",
        param="api_base",
        request_body_value=input_value["api_base"],
        llm_router=router,
    )
    print(resp)
    assert resp == should_return_true