feat(router.py): support content policy fallbacks
Closes https://github.com/BerriAI/litellm/issues/2632
This commit is contained in:
parent
0404d30a9c
commit
6f715b4782
6 changed files with 197 additions and 33 deletions
@@ -1109,3 +1109,59 @@ async def test_client_side_fallbacks_list(sync_mode):
    assert isinstance(response, litellm.ModelResponse)
    assert response.model is not None and response.model == "gpt-4o"


@pytest.mark.parametrize("sync_mode", [True, False])
@pytest.mark.asyncio
async def test_router_content_policy_fallbacks(sync_mode):
    os.environ["LITELLM_LOG"] = "DEBUG"
    router = Router(
        model_list=[
            {
                "model_name": "claude-2",
                "litellm_params": {
                    "model": "claude-2",
                    "api_key": "",
                    "mock_response": Exception("content filtering policy"),
                },
            },
            {
                "model_name": "my-fallback-model",
                "litellm_params": {
                    "model": "claude-2",
                    "api_key": "",
                    "mock_response": "This works!",
                },
            },
            {
                "model_name": "my-general-model",
                "litellm_params": {
                    "model": "claude-2",
                    "api_key": "",
                    "mock_response": Exception("Should not have called this."),
                },
            },
            {
                "model_name": "my-context-window-model",
                "litellm_params": {
                    "model": "claude-2",
                    "api_key": "",
                    "mock_response": Exception("Should not have called this."),
                },
            },
        ],
        content_policy_fallbacks=[{"claude-2": ["my-fallback-model"]}],
        fallbacks=[{"claude-2": ["my-general-model"]}],
        context_window_fallbacks=[{"claude-2": ["my-context-window-model"]}],
    )

    if sync_mode is True:
        response = router.completion(
            model="claude-2",
            messages=[{"role": "user", "content": "Hey, how's it going?"}],
        )
    else:
        response = await router.acompletion(
            model="claude-2",
            messages=[{"role": "user", "content": "Hey, how's it going?"}],
        )
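For context, a minimal usage sketch of the feature this commit adds, built only from the Router arguments the test above exercises. The mock_response entries simulate a provider-side content filter and a successful fallback reply; against a real provider they would not be needed.

from litellm import Router

# Two deployments: the primary simulates a content-policy rejection,
# the fallback returns a mocked success.
router = Router(
    model_list=[
        {
            "model_name": "claude-2",
            "litellm_params": {
                "model": "claude-2",
                "api_key": "",
                "mock_response": Exception("content filtering policy"),
            },
        },
        {
            "model_name": "my-fallback-model",
            "litellm_params": {
                "model": "claude-2",
                "api_key": "",
                "mock_response": "This works!",
            },
        },
    ],
    # Reroute content-policy failures on "claude-2" to "my-fallback-model".
    content_policy_fallbacks=[{"claude-2": ["my-fallback-model"]}],
)

response = router.completion(
    model="claude-2",
    messages=[{"role": "user", "content": "Hey, how's it going?"}],
)
print(response.choices[0].message.content)  # expected: "This works!"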