Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 02:34:29 +00:00)
fix(main.py): fix key leak error when unknown provider given (#8556)
* fix(main.py): fix key leak error when an unknown provider is given; don't return the passed-in args when an embedding request hits an unknown route
* fix(main.py): remove instances of {args} being passed into exceptions, to prevent potential key leaks
* test(code_coverage/prevent_key_leaks_in_codebase.py): ban usage of {args} in the codebase
* fix: fix linting errors
* fix: remove unused variable
Parent: c6026ea6f9
Commit: a9276f27f9
8 changed files with 193 additions and 30 deletions
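The third bullet above adds a codebase-wide guard against `{args}` interpolation. The actual test's logic is not shown in this excerpt, so the following is only a minimal sketch of what such a check could look like, assuming it simply scans source files for the banned token:

```python
# Hypothetical sketch of a guard like code_coverage/prevent_key_leaks_in_codebase.py:
# fail if any source file interpolates raw call arguments into a string via
# "{args}", since args can contain API keys. The real test's rules may differ.
import pathlib
import sys

BANNED_TOKEN = "{args}"  # formatting args into messages risks leaking keys


def find_violations(root: str) -> list[str]:
    """Return paths of Python files that contain the banned token."""
    violations = []
    for path in sorted(pathlib.Path(root).rglob("*.py")):
        text = path.read_text(encoding="utf-8", errors="ignore")
        if BANNED_TOKEN in text:
            violations.append(str(path))
    return violations


if __name__ == "__main__":
    bad = find_violations("litellm")
    if bad:
        print("banned '{args}' usage found in:")
        for p in bad:
            print(f"  {p}")
        sys.exit(1)
```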
Diff excerpt (presumably from litellm/exceptions.py; the other changed files are not shown):

```diff
@@ -14,6 +14,8 @@ from typing import Optional
 import httpx
 import openai
 
+from litellm.types.utils import LiteLLMCommonStrings
+
 
 class AuthenticationError(openai.AuthenticationError):  # type: ignore
     def __init__(
@@ -790,3 +792,16 @@ class MockException(openai.APIError):
         if request is None:
             request = httpx.Request(method="POST", url="https://api.openai.com/v1")
         super().__init__(self.message, request=request, body=None)  # type: ignore
+
+
+class LiteLLMUnknownProvider(BadRequestError):
+    def __init__(self, model: str, custom_llm_provider: Optional[str] = None):
+        self.message = LiteLLMCommonStrings.llm_provider_not_provided.value.format(
+            model=model, custom_llm_provider=custom_llm_provider
+        )
+        super().__init__(
+            self.message, model=model, llm_provider=custom_llm_provider, response=None
+        )
+
+    def __str__(self):
+        return self.message
```
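For context, a hedged sketch of how the new exception might be raised from main.py when provider resolution fails. The raise site, provider registry, and module path here are assumptions, not part of the excerpt; the point is that the error message is built only from the model and provider names, never from the caller's args/kwargs, which may contain API keys:

```python
# Illustrative only: the provider set and function name are made up.
from typing import Optional

from litellm.exceptions import LiteLLMUnknownProvider  # added in this diff (module path assumed)

KNOWN_PROVIDERS = {"openai", "azure", "anthropic"}  # hypothetical registry


def resolve_provider(model: str, custom_llm_provider: Optional[str]) -> str:
    if custom_llm_provider not in KNOWN_PROVIDERS:
        # Raises with a message built from LiteLLMCommonStrings using only the
        # model/provider names; no request kwargs are echoed back to the caller.
        raise LiteLLMUnknownProvider(
            model=model, custom_llm_provider=custom_llm_provider
        )
    return custom_llm_provider
```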