add context window exceeded error mapping to openai

Krrish Dholakia 2023-08-29 16:08:00 -07:00
parent d7fe4f6c90
commit f7955d52b5
4 changed files with 10 additions and 5 deletions

@@ -35,12 +35,12 @@ litellm.failure_callback = ["sentry"]
 # Approach: Run each model through the test -> assert if the correct error (always the same one) is triggered
 # models = ["gpt-3.5-turbo", "chatgpt-test", "claude-instant-1", "command-nightly"]
-test_model = "claude-instant-1"
-models = ["claude-instant-1"]
+test_model = "gpt-3.5-turbo"
+models = ["gpt-3.5-turbo"]
 def logging_fn(model_call_dict):
-    return
+    # return
     if "model" in model_call_dict:
         print(f"model_call_dict: {model_call_dict['model']}")
     else:
@@ -59,7 +59,6 @@ def test_context_window(model):
             messages=messages,
             logger_fn=logging_fn,
         )
-        print(f"response: {response}")
     except ContextWindowExceededError as e:
         print(f"ContextWindowExceededError: {e.llm_provider}")
         return
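
For reference, here is a minimal, self-contained sketch of what the updated test now exercises: sending a prompt that exceeds gpt-3.5-turbo's context window and expecting litellm to raise ContextWindowExceededError instead of the raw OpenAI error. The oversized-prompt construction and the exception import path are illustrative assumptions, not lines taken from the test file.

# Sketch only: the prompt construction and the import of
# ContextWindowExceededError are assumptions, not copied from the test.
from litellm import completion, ContextWindowExceededError

def check_openai_context_window_mapping():
    # A prompt far larger than gpt-3.5-turbo's context window.
    messages = [{"role": "user", "content": "how do context windows work? " * 50000}]
    try:
        completion(model="gpt-3.5-turbo", messages=messages)
    except ContextWindowExceededError as e:
        # The mapped error records which provider's limit was hit.
        print(f"ContextWindowExceededError from provider: {e.llm_provider}")
        return True
    return False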

@@ -1343,6 +1343,12 @@ def exception_type(model, original_exception, custom_llm_provider):
                 original_exception.llm_provider = "azure"
             else:
                 original_exception.llm_provider = "openai"
+            if "This model's maximum context length is" in original_exception:
+                raise ContextWindowExceededError(
+                    message=str(original_exception),
+                    model=model,
+                    llm_provider=original_exception.llm_provider
+                )
             raise original_exception
         elif model:
             error_str = str(original_exception)
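
The added branch converts OpenAI's context-length error into litellm's ContextWindowExceededError while preserving the llm_provider tag assigned just above it. Below is a standalone sketch of the same pattern, not litellm's actual exception_type code; it applies the substring check to str(original_exception), since a bare `in original_exception` membership test only works if the exception type itself supports `in`. The helper name and the simplified ContextWindowExceededError definition are assumptions for illustration.

# Standalone sketch of the mapping pattern added in this hunk; the class
# definition and helper name below are illustrative, not litellm's own.
class ContextWindowExceededError(Exception):
    def __init__(self, message, model, llm_provider):
        self.message = message
        self.model = model
        self.llm_provider = llm_provider
        super().__init__(message)

def map_openai_exception(model, original_exception, llm_provider="openai"):
    # Apply the substring check to the stringified exception so the
    # membership test is always valid.
    error_str = str(original_exception)
    if "This model's maximum context length is" in error_str:
        raise ContextWindowExceededError(
            message=error_str,
            model=model,
            llm_provider=llm_provider,
        )
    # Anything else falls through unchanged, mirroring the diff's
    # trailing `raise original_exception`.
    raise original_exception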

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.501"
+version = "0.1.502"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"