fix(utils.py): predibase exception mapping - map HTTP 424 to a BadRequestError

This commit is contained in:
Krrish Dholakia 2024-06-25 13:47:38 -07:00
parent 193af5f23b
commit 1e51b8894f
3 changed files with 28 additions and 28 deletions

View file

@ -1,27 +1,26 @@
# What is this?
## Controller file for Predibase Integration - https://predibase.com/
from functools import partial
import os, types
import traceback
import copy
import json
from enum import Enum
import requests, copy # type: ignore
import os
import time
from typing import Callable, Optional, List, Literal, Union
from litellm.utils import (
ModelResponse,
Usage,
CustomStreamWrapper,
Message,
Choices,
)
from litellm.litellm_core_utils.core_helpers import map_finish_reason
import litellm
from .prompt_templates.factory import prompt_factory, custom_prompt
from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler
from .base import BaseLLM
import traceback
import types
from enum import Enum
from functools import partial
from typing import Callable, List, Literal, Optional, Union
import httpx # type: ignore
import requests # type: ignore
import litellm
from litellm.litellm_core_utils.core_helpers import map_finish_reason
from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler
from litellm.utils import Choices, CustomStreamWrapper, Message, ModelResponse, Usage
from .base import BaseLLM
from .prompt_templates.factory import custom_prompt, prompt_factory
class PredibaseError(Exception):
@ -496,7 +495,9 @@ class PredibaseChatCompletion(BaseLLM):
except httpx.HTTPStatusError as e:
raise PredibaseError(
status_code=e.response.status_code,
message="HTTPStatusError - {}".format(e.response.text),
message="HTTPStatusError - received status_code={}, error_message={}".format(
e.response.status_code, e.response.text
),
)
except Exception as e:
raise PredibaseError(

View file

@ -14,9 +14,10 @@ model_list:
- model_name: fake-openai-endpoint
litellm_params:
model: predibase/llama-3-8b-instruct
api_base: "http://0.0.0.0:8000"
# api_base: "http://0.0.0.0:8081"
api_key: os.environ/PREDIBASE_API_KEY
tenant_id: os.environ/PREDIBASE_TENANT_ID
adapter_id: qwoiqjdoqin
max_retries: 0
temperature: 0.1
max_new_tokens: 256
@ -73,6 +74,8 @@ model_list:
litellm_settings:
callbacks: ["dynamic_rate_limiter"]
# success_callback: ["langfuse"]
# failure_callback: ["langfuse"]
# default_team_settings:
# - team_id: proj1
# success_callback: ["langfuse"]

View file

@ -6157,13 +6157,6 @@ def exception_type(
response=original_exception.response,
litellm_debug_info=extra_information,
)
if "Request failed during generation" in error_str:
# this is an internal server error from predibase
raise litellm.InternalServerError(
message=f"PredibaseException - {error_str}",
llm_provider="predibase",
model=model,
)
elif hasattr(original_exception, "status_code"):
if original_exception.status_code == 500:
exception_mapping_worked = True
@ -6201,7 +6194,10 @@ def exception_type(
llm_provider=custom_llm_provider,
litellm_debug_info=extra_information,
)
elif original_exception.status_code == 422:
elif (
original_exception.status_code == 422
or original_exception.status_code == 424
):
exception_mapping_worked = True
raise BadRequestError(
message=f"PredibaseException - {original_exception.message}",