fix: fix test

commit 01aa536716
parent bfbe26b91d

4 changed files with 22 additions and 10 deletions

@@ -7,6 +7,11 @@ model_list:
       model: azure/gpt-4o
       api_key: os.environ/AZURE_API_KEY
       api_base: os.environ/AZURE_API_BASE
+  - model_name: fake-openai-endpoint-5
+    litellm_params:
+      model: openai/my-fake-model
+      api_key: my-fake-key
+      api_base: https://exampleopenaiendpoint-production.up.railway.app/
+      timeout: 1
 
 litellm_settings:
   fallbacks: [{"gpt-3.5-turbo": ["gpt-4o"]}]
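
The new fake-openai-endpoint-5 deployment points at a stub OpenAI-compatible server and sets timeout: 1, presumably so that requests routed to it fail fast in the timeout tests further down. As a rough illustration only (not part of this commit; the base URL and key below are placeholders), a client call through a locally running proxy could look like:

    # Illustrative only: drive the fake endpoint through a locally running proxy.
    # base_url and api_key are placeholders, not values from this commit.
    import openai

    client = openai.OpenAI(base_url="http://localhost:4000", api_key="sk-1234")

    try:
        client.chat.completions.create(
            model="fake-openai-endpoint-5",
            messages=[{"role": "user", "content": "hello"}],
        )
    except openai.APIError as e:
        # With timeout: 1 on the deployment, a slow upstream response surfaces
        # as a proxy error whose body follows the OpenAI error schema.
        print(type(e).__name__, e)
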
@@ -1994,13 +1994,14 @@ class ProxyException(Exception):
         message: str,
         type: str,
         param: Optional[str],
-        code: Optional[Union[int, str]] = None,
+        code: Optional[Union[int, str]] = None,  # maps to status code
         headers: Optional[Dict[str, str]] = None,
+        openai_code: Optional[str] = None,  # maps to 'code' in openai
     ):
         self.message = str(message)
         self.type = type
         self.param = param
+        self.openai_code = openai_code or code
         # If we look on official python OpenAI lib, the code should be a string:
         # https://github.com/openai/openai-python/blob/195c05a64d39c87b2dfdf1eca2d339597f1fce03/src/openai/types/shared/error_object.py#L11
         # Related LiteLLM issue: https://github.com/BerriAI/litellm/discussions/4834
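
ProxyException now distinguishes the OpenAI-style error code (a string, per the error_object.py link above) from the HTTP status code, with openai_code falling back to code when it is not supplied. A simplified sketch of just the fields touched in this hunk (not the full class):

    # Simplified sketch of the constructor fields from this hunk; not the full class.
    from typing import Dict, Optional, Union


    class ProxyException(Exception):
        def __init__(
            self,
            message: str,
            type: str,
            param: Optional[str],
            code: Optional[Union[int, str]] = None,  # maps to status code
            headers: Optional[Dict[str, str]] = None,
            openai_code: Optional[str] = None,  # maps to 'code' in openai
        ):
            self.message = str(message)
            self.type = type
            self.param = param
            # Prefer the OpenAI-style string code; fall back to the status code.
            self.openai_code = openai_code or code


    e = ProxyException(
        message="model not found",
        type="invalid_request_error",
        param="model",
        code=400,
        openai_code="model_not_found",
    )
    assert e.openai_code == "model_not_found"

    # Without an explicit openai_code, the status code is reused.
    assert ProxyException("boom", "None", "None", 500).openai_code == 500
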
@@ -3716,7 +3716,8 @@ async def chat_completion( # noqa: PLR0915
             message=getattr(e, "message", error_msg),
             type=getattr(e, "type", "None"),
             param=getattr(e, "param", "None"),
-            code=getattr(e, "code", getattr(e, "status_code", 500)),
+            openai_code=getattr(e, "code", None),
+            code=getattr(e, "status_code", 500),
             headers=headers,
         )

@@ -3929,7 +3930,8 @@ async def completion( # noqa: PLR0915
             message=getattr(e, "message", error_msg),
             type=getattr(e, "type", "None"),
             param=getattr(e, "param", "None"),
-            code=getattr(e, "code", getattr(e, "status_code", 500)),
+            openai_code=getattr(e, "code", None),
+            code=getattr(e, "status_code", 500),
         )

@@ -4138,7 +4140,8 @@ async def embeddings( # noqa: PLR0915
             message=getattr(e, "message", error_msg),
             type=getattr(e, "type", "None"),
             param=getattr(e, "param", "None"),
-            code=getattr(e, "code", getattr(e, "status_code", 500)),
+            openai_code=getattr(e, "code", None),
+            code=getattr(e, "status_code", 500),
         )

@@ -4257,7 +4260,8 @@ async def image_generation(
             message=getattr(e, "message", error_msg),
             type=getattr(e, "type", "None"),
             param=getattr(e, "param", "None"),
-            code=getattr(e, "code", getattr(e, "status_code", 500)),
+            openai_code=getattr(e, "code", None),
+            code=getattr(e, "status_code", 500),
         )

@@ -4518,7 +4522,8 @@ async def audio_transcriptions(
             message=getattr(e, "message", error_msg),
             type=getattr(e, "type", "None"),
             param=getattr(e, "param", "None"),
-            code=getattr(e, "code", getattr(e, "status_code", 500)),
+            openai_code=getattr(e, "code", None),
+            code=getattr(e, "status_code", 500),
         )

@@ -4667,7 +4672,8 @@ async def get_assistants(
             message=getattr(e, "message", error_msg),
             type=getattr(e, "type", "None"),
             param=getattr(e, "param", "None"),
-            code=getattr(e, "code", getattr(e, "status_code", 500)),
+            openai_code=getattr(e, "code", None),
+            code=getattr(e, "status_code", 500),
         )
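
Each endpoint's exception handler above now passes the upstream exception's code attribute as openai_code and its status_code as the HTTP code, rather than conflating the two. As a hedged sketch (the helper below is illustrative, not from this commit), the split maps onto an OpenAI-shaped error response like this:

    # Illustrative helper, not part of this commit: maps the split fields onto
    # an HTTP status plus an OpenAI-style error body.
    from typing import Any, Dict, Optional, Tuple


    def to_openai_error(
        message: str,
        type: str,
        param: Optional[str],
        code: Any,          # HTTP status, from getattr(e, "status_code", 500)
        openai_code: Any,   # OpenAI 'code', from getattr(e, "code", None)
    ) -> Tuple[int, Dict[str, Any]]:
        try:
            status = int(code) if code is not None else 500
        except (TypeError, ValueError):
            status = 500
        body = {
            "error": {
                "message": message,
                "type": type,
                "param": param,
                # Per the openai-python error_object.py linked above, 'code'
                # should be a string in the response body.
                "code": str(openai_code) if openai_code is not None else None,
            }
        }
        return status, body
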
@@ -193,7 +193,7 @@ async def test_chat_completion_with_timeout():
     start_time = time.time()
     response, headers = await chat_completion(
         session=session,
-        key="sk-1234",
+        key="sk-PIp1h0RekR",
         model=model,
         messages=messages,
         num_retries=0,