(feat) mistral - add exception mapping

ishaan-jaff 2023-12-14 18:57:39 +05:30
parent a033016a63
commit c0cc78b943
3 changed files with 37 additions and 3 deletions

View file

@@ -163,6 +163,14 @@ openai_compatible_endpoints: List = [
     "api.mistral.ai/v1"
 ]
 
+# this is maintained for Exception Mapping
+openai_compatible_providers: List = [
+    "anyscale",
+    "mistral",
+    "deepinfra",
+    "perplexity"
+]
+
 # well supported replicate llms
 replicate_models: List = [
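
Because every provider in `openai_compatible_providers` speaks the OpenAI wire format, exception mapping can reuse the OpenAI branch for all of them. A minimal sketch of that membership check, assuming an installed `litellm`; the helper name `maps_like_openai` is hypothetical, not part of this commit:

import litellm

def maps_like_openai(custom_llm_provider: str) -> bool:
    # Hypothetical helper (not in the codebase): mirrors the condition
    # extended in exception_type() below. Providers in
    # litellm.openai_compatible_providers raise OpenAI-style errors, so
    # they can share the OpenAI exception-mapping branch.
    openai_variants = ("openai", "text-completion-openai", "custom_openai")
    return (
        custom_llm_provider in openai_variants
        or custom_llm_provider in litellm.openai_compatible_providers
    )

print(maps_like_openai("mistral"))  # True once "mistral" is in the list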

View file

@@ -197,7 +197,7 @@ def test_completion_azure_exception():
         print("good job got the correct error for azure when key not set")
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-test_completion_azure_exception()
+# test_completion_azure_exception()
 
 async def asynctest_completion_azure_exception():
     try:
@@ -227,7 +227,6 @@ async def asynctest_completion_azure_exception():
         print("Got wrong exception")
         print("exception", e)
         pytest.fail(f"Error occurred: {e}")
 # import asyncio
 # asyncio.run(
 #     asynctest_completion_azure_exception()
@@ -261,6 +260,33 @@ def test_completion_openai_exception():
         pytest.fail(f"Error occurred: {e}")
 # test_completion_openai_exception()
+def test_completion_mistral_exception():
+    # test if mistral/mistral-tiny raises openai.AuthenticationError
+    try:
+        import openai
+        print("Testing mistral ai exception mapping")
+        litellm.set_verbose = False
+        ## Test mistral call with an invalid key
+        old_api_key = os.environ["MISTRAL_API_KEY"]
+        os.environ["MISTRAL_API_KEY"] = "good morning"
+        response = completion(
+            model="mistral/mistral-tiny",
+            messages=[
+                {
+                    "role": "user",
+                    "content": "hello"
+                }
+            ],
+        )
+        print(f"response: {response}")
+    except openai.AuthenticationError as e:
+        os.environ["MISTRAL_API_KEY"] = old_api_key
+        print("good job got the correct error for mistral when the key is invalid")
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+# test_completion_mistral_exception()
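
Note that the test above restores the real key only on the expected exception path. A tighter variant, not part of this commit, using pytest's `raises` and `monkeypatch` so the environment is always restored; the test name is hypothetical:

import openai
import pytest
from litellm import completion

def test_mistral_auth_error_sketch(monkeypatch):
    # monkeypatch restores MISTRAL_API_KEY after the test, pass or fail,
    # so a failure cannot leak the fake key into later tests.
    monkeypatch.setenv("MISTRAL_API_KEY", "bad-key")
    # An invalid key should surface as openai.AuthenticationError now that
    # mistral is routed through the OpenAI exception-mapping branch.
    with pytest.raises(openai.AuthenticationError):
        completion(
            model="mistral/mistral-tiny",
            messages=[{"role": "user", "content": "hello"}],
        )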

View file

@@ -4163,7 +4163,7 @@ def exception_type(
                 llm_provider=custom_llm_provider
             )
-        if custom_llm_provider == "openai" or custom_llm_provider == "text-completion-openai" or custom_llm_provider == "custom_openai":
+        if custom_llm_provider == "openai" or custom_llm_provider == "text-completion-openai" or custom_llm_provider == "custom_openai" or custom_llm_provider in litellm.openai_compatible_providers:
             if "This model's maximum context length is" in error_str or "Request too large" in error_str:
                 exception_mapping_worked = True
                 raise ContextWindowExceededError(
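
Taken together, the three changes mean a failed Mistral call should now surface the same typed errors as an OpenAI call. A sketch of the caller-visible effect, assuming network access to api.mistral.ai and a deliberately invalid key:

import os
import openai
from litellm import completion

os.environ["MISTRAL_API_KEY"] = "bad-key"  # deliberately invalid
try:
    completion(
        model="mistral/mistral-tiny",
        messages=[{"role": "user", "content": "hello"}],
    )
except openai.AuthenticationError as e:
    # With "mistral" in openai_compatible_providers, the auth failure is
    # mapped to a typed OpenAI error rather than a generic exception.
    print("mapped exception:", type(e).__name__)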