fix(azure.py-+-proxy_server.py): fix function calling response object + support router on proxy

Krrish Dholakia 2023-11-15 13:15:09 -08:00
parent 8f8b35eb34
commit 8c104e9c6a
8 changed files with 54 additions and 59 deletions


@@ -195,7 +195,7 @@ class AzureChatCompletion(BaseLLM):
method="POST"
) as response:
if response.status_code != 200:
- raise AzureOpenAIError(status_code=response.status_code, message=response.text)
+ raise AzureOpenAIError(status_code=response.status_code, message="An error occurred while streaming")
completion_stream = response.iter_lines()
streamwrapper = CustomStreamWrapper(completion_stream=completion_stream, model=model, custom_llm_provider="azure",logging_obj=logging_obj)