Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
fix(azure.py-+-proxy_server.py): fix function calling response object + support router on proxy
parent 8f8b35eb34
commit 8c104e9c6a
8 changed files with 54 additions and 59 deletions
azure.py
@@ -195,7 +195,7 @@ class AzureChatCompletion(BaseLLM):
             method="POST"
         ) as response:
             if response.status_code != 200:
-                raise AzureOpenAIError(status_code=response.status_code, message=response.text)
+                raise AzureOpenAIError(status_code=response.status_code, message="An error occurred while streaming")

             completion_stream = response.iter_lines()
             streamwrapper = CustomStreamWrapper(completion_stream=completion_stream, model=model, custom_llm_provider="azure",logging_obj=logging_obj)
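Context for the one-line fix above: this code path streams the Azure response (note the `response.iter_lines()` call), and with a streaming HTTP client the body has not been read at the point the status check runs, so building the error message from `response.text` can itself blow up. The sketch below is a minimal illustration of that failure mode, assuming an httpx streaming client like the one this hunk appears to use; the placeholder URL and the plain `RuntimeError` stand in for the real endpoint and `AzureOpenAIError`.

import httpx

# Inside httpx's stream() context the response body has not been read yet,
# so accessing `response.text` raises httpx.ResponseNotRead. The commit
# sidesteps this by raising with a fixed message instead.
with httpx.Client() as client:
    with client.stream("POST", "https://example.invalid/chat/completions") as response:
        if response.status_code != 200:
            try:
                detail = response.text  # fails mid-stream: body not read yet
            except httpx.ResponseNotRead:
                detail = "An error occurred while streaming"
            raise RuntimeError(f"{response.status_code}: {detail}")
        for line in response.iter_lines():
            print(line)

An alternative would be to call `response.read()` before touching `response.text`, which httpx supports inside the stream block; the commit opts for the static message instead.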
|
Loading…
Add table
Add a link
Reference in a new issue