feat(router.py): support content policy fallbacks

Closes https://github.com/BerriAI/litellm/issues/2632
Krrish Dholakia 2024-06-14 17:15:44 -07:00
parent 0404d30a9c
commit 6f715b4782
6 changed files with 197 additions and 33 deletions


@@ -401,6 +401,7 @@ def mock_completion(
     stream: Optional[bool] = False,
     mock_response: Union[str, Exception] = "This is a mock request",
     logging=None,
+    custom_llm_provider=None,
     **kwargs,
 ):
     """
@@ -438,7 +439,7 @@ def mock_completion(
         raise litellm.APIError(
             status_code=getattr(mock_response, "status_code", 500),  # type: ignore
             message=getattr(mock_response, "text", str(mock_response)),
-            llm_provider=getattr(mock_response, "llm_provider", "openai"),  # type: ignore
+            llm_provider=getattr(mock_response, "llm_provider", custom_llm_provider or "openai"),  # type: ignore
             model=model,  # type: ignore
             request=httpx.Request(method="POST", url="https://api.openai.com/v1/"),
         )
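
With `custom_llm_provider` threaded into `mock_completion`, a mocked exception now surfaces the caller's provider instead of the old hard-coded "openai" default. A minimal sketch of exercising this path, assuming `completion()` resolves `custom_llm_provider` from the model name before dispatching to the mock path (as the hunk further down suggests):

```python
import litellm

# A plain Exception carries no .llm_provider attribute, so the new
# `custom_llm_provider or "openai"` fallback kicks in: for a claude-2
# call the provider is inferred as "anthropic" before the mock runs.
try:
    litellm.completion(
        model="claude-2",
        messages=[{"role": "user", "content": "Hey, how's it going?"}],
        mock_response=Exception("content filtering policy"),
    )
except litellm.APIError as e:
    print(e.llm_provider)  # "anthropic", not the old hard-coded "openai"
```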
@@ -907,6 +908,7 @@ def completion(
             logging=logging,
             acompletion=acompletion,
             mock_delay=kwargs.get("mock_delay", None),
+            custom_llm_provider=custom_llm_provider,
         )
     if custom_llm_provider == "azure":
         # azure configs
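
For context on the feature named in the commit title: the router gains a content policy fallback option so a `ContentPolicyViolationError` from one deployment can be retried on another. A minimal sketch, assuming `content_policy_fallbacks` takes the same `[{model: [fallbacks]}]` shape as the existing `context_window_fallbacks`, and that `litellm.ContentPolicyViolationError` accepts `message`/`model`/`llm_provider` keywords; model names are placeholders:

```python
import litellm
from litellm import Router

router = Router(
    model_list=[
        {
            "model_name": "claude-2",
            "litellm_params": {
                "model": "claude-2",
                "api_key": "",
                # mock a content-policy rejection from this deployment
                "mock_response": litellm.ContentPolicyViolationError(
                    message="content filtering policy",
                    model="claude-2",
                    llm_provider="anthropic",
                ),
            },
        },
        {
            "model_name": "my-fallback-model",
            "litellm_params": {
                "model": "gpt-3.5-turbo",
                "mock_response": "This works!",
            },
        },
    ],
    # when claude-2 trips the content policy, retry on my-fallback-model
    content_policy_fallbacks=[{"claude-2": ["my-fallback-model"]}],
)

response = router.completion(
    model="claude-2",
    messages=[{"role": "user", "content": "Hey, how's it going?"}],
)
print(response.choices[0].message.content)  # "This works!"
```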