Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00
adding context window exceeded error to huggingface
parent ad493a3109
commit 546ad43b15
3 changed files with 11 additions and 1 deletion
Binary file not shown.
@@ -94,7 +94,10 @@ class HuggingfaceRestAPILLM:
             additional_args={"complete_input_dict": data},
         )
         ## RESPONSE OBJECT
-        completion_response = response.json()
+        try:
+            completion_response = response.json()
+        except:
+            raise HuggingfaceError(message=response.text, status_code=response.status_code)
         print_verbose(f"response: {completion_response}")
         if isinstance(completion_response, dict) and "error" in completion_response:
             print_verbose(f"completion error: {completion_response['error']}")
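The hunk above wraps the bare response.json() call so that a non-JSON error body from the Hugging Face Inference API is re-raised as a provider error rather than surfacing as an unhandled JSON decode failure. A minimal sketch of that pattern in isolation, assuming requests for the HTTP call; the HuggingfaceError stub and parse_hf_response helper below are illustrative stand-ins, not the repo's actual definitions:

import requests


class HuggingfaceError(Exception):
    # Stand-in with the same (message, status_code) shape used in the diff.
    def __init__(self, message: str, status_code: int):
        self.message = message
        self.status_code = status_code
        super().__init__(message)


def parse_hf_response(response: requests.Response) -> dict:
    try:
        completion_response = response.json()
    except Exception:
        # The raw body is still readable via response.text even when it is
        # not valid JSON, so it is preserved in the raised error.
        raise HuggingfaceError(message=response.text, status_code=response.status_code)
    return completion_response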
@@ -1462,6 +1462,13 @@ def exception_type(model, original_exception, custom_llm_provider):
                     llm_provider="cohere",
                 )
             elif custom_llm_provider == "huggingface":
+                if "length limit exceeded" in error_str:
+                    exception_mapping_worked = True
+                    raise ContextWindowExceededError(
+                        message=error_str,
+                        model=model,
+                        llm_provider="huggingface"
+                    )
                 if hasattr(original_exception, "status_code"):
                     if original_exception.status_code == 401:
                         exception_mapping_worked = True
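With this mapping in place, a "length limit exceeded" error from the Hugging Face Inference API reaches callers as ContextWindowExceededError, so they can branch on it instead of parsing provider-specific error strings. A hedged caller-side sketch, assuming ContextWindowExceededError is importable from the top-level litellm package and using an illustrative model name and recovery strategy:

from litellm import ContextWindowExceededError, completion


def safe_completion(messages):
    try:
        return completion(
            model="huggingface/bigcode/starcoder",  # illustrative HF Inference API model
            messages=messages,
        )
    except ContextWindowExceededError as err:
        # Illustrative recovery: retry with only the most recent message.
        print(f"Context window exceeded, retrying with a shorter prompt: {err}")
        return completion(
            model="huggingface/bigcode/starcoder",
            messages=messages[-1:],
        )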