fix(azure.py): raise streaming exceptions

Krrish Dholakia 2023-12-27 15:43:01 +05:30
parent f4fe2575cc
commit 31148922b3
2 changed files with 46 additions and 39 deletions


@@ -398,6 +398,7 @@ class AzureChatCompletion(BaseLLM):
         azure_ad_token: Optional[str] = None,
         client=None,
     ):
+        try:
             # init AzureOpenAI Client
             azure_client_params = {
                 "api_version": api_version,
@@ -435,6 +436,11 @@ class AzureChatCompletion(BaseLLM):
                 logging_obj=logging_obj,
             )
             return streamwrapper  ## DO NOT make this into an async for ... loop, it will yield an async generator, which won't raise errors if the response fails
+        except Exception as e:
+            if hasattr(e, "status_code"):
+                raise AzureOpenAIError(status_code=e.status_code, message=str(e))
+            else:
+                raise AzureOpenAIError(status_code=500, message=str(e))
 
     async def aembedding(
         self,
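
The comment on `return streamwrapper` is the key constraint here: once a function body contains `yield`, calling it only builds an async generator object, so any exception raised during client setup or stream creation is deferred until the caller starts iterating. Wrapping the body in try/except and returning the stream wrapper from a plain coroutine means failures surface (and can be re-raised as AzureOpenAIError) at call time. A standalone sketch of that difference, not litellm code and with illustrative names:

import asyncio


# Standalone sketch (not litellm code): contrast an async generator with a plain coroutine.
async def as_async_generator():
    # Exceptions raised here are NOT seen when the function is called ...
    raise RuntimeError("stream setup failed")
    yield  # the presence of `yield` turns the whole function into an async generator


async def as_plain_coroutine():
    # ... while here they surface as soon as the call is awaited.
    raise RuntimeError("stream setup failed")


async def main():
    gen = as_async_generator()  # no error yet: this only builds a generator object
    try:
        async for _ in gen:  # the error appears only once iteration starts
            pass
    except RuntimeError as e:
        print("deferred until iteration:", e)

    try:
        await as_plain_coroutine()  # the error is raised immediately at the call site
    except RuntimeError as e:
        print("raised at call time:", e)


asyncio.run(main())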


@@ -290,6 +290,7 @@ class CompletionCustomHandler(
                     kwargs["original_response"], (str, litellm.CustomStreamWrapper)
                 )
                 or inspect.isasyncgen(kwargs["original_response"])
+                or inspect.iscoroutine(kwargs["original_response"])
                 or kwargs["original_response"] == None
             )
             assert isinstance(kwargs["additional_args"], (dict, type(None)))
@@ -439,7 +440,7 @@ async def test_async_chat_azure_stream():
         )
         async for chunk in response:
             continue
-        ## test failure callback
+        # test failure callback
         try:
             response = await litellm.acompletion(
                 model="azure/chatgpt-v-2",
@@ -459,7 +460,7 @@ async def test_async_chat_azure_stream():
         pytest.fail(f"An exception occurred: {str(e)}")
 
-# asyncio.run(test_async_chat_azure_stream())
+asyncio.run(test_async_chat_azure_stream())
 
 ## Test Bedrock + sync
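
The widened assertion in the callback handler accepts one more shape for kwargs["original_response"]: a coroutine object, alongside strings, async generators, CustomStreamWrapper instances, and None. A minimal standalone sketch of that type check (the helper name below is made up for illustration and is not part of litellm):

import asyncio
import inspect


async def coroutine_response():
    return "ok"


async def async_gen_response():
    yield "chunk"


def looks_like_raw_response(obj) -> bool:
    # Illustrative helper (not litellm code): mirrors the widened assertion,
    # so str, async generator, coroutine, or None all pass.
    return (
        isinstance(obj, str)
        or inspect.isasyncgen(obj)
        or inspect.iscoroutine(obj)
        or obj is None
    )


async def main():
    coro = coroutine_response()
    agen = async_gen_response()
    print(looks_like_raw_response(coro))  # True: a coroutine object
    print(looks_like_raw_response(agen))  # True: an async generator
    print(looks_like_raw_response(None))  # True
    await coro  # awaited so Python does not warn about an un-awaited coroutine
    await agen.aclose()


asyncio.run(main())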