diff --git a/litellm/llms/azure.py b/litellm/llms/azure.py index 14248acf30..d99310bb5f 100644 --- a/litellm/llms/azure.py +++ b/litellm/llms/azure.py @@ -160,7 +160,7 @@ class AzureChatCompletion(BaseLLM): async def acompletion(self, api_base: str, data: dict, headers: dict, model_response: ModelResponse): async with aiohttp.ClientSession() as session: - async with session.post(api_base, json=data, headers=headers) as response: + async with session.post(api_base, json=data, headers=headers, ssl=None) as response: response_json = await response.json() if response.status != 200: raise AzureOpenAIError(status_code=response.status, message=response.text) @@ -176,7 +176,7 @@ class AzureChatCompletion(BaseLLM): model_response: ModelResponse, model: str): async with aiohttp.ClientSession() as session: - async with session.post(api_base, json=data, headers=headers) as response: + async with session.post(api_base, json=data, headers=headers, ssl=None) as response: # Check if the request was successful (status code 200) if response.status != 200: raise AzureOpenAIError(status_code=response.status, message=await response.text()) diff --git a/litellm/llms/openai.py b/litellm/llms/openai.py index c982d01e42..3eaaea450b 100644 --- a/litellm/llms/openai.py +++ b/litellm/llms/openai.py @@ -273,7 +273,7 @@ class OpenAIChatCompletion(BaseLLM): model_response: ModelResponse, model: str): async with aiohttp.ClientSession() as session: - async with session.post(api_base, json=data, headers=headers) as response: + async with session.post(api_base, json=data, headers=headers, ssl=None) as response: # Check if the request was successful (status code 200) if response.status != 200: raise OpenAIError(status_code=response.status, message=await response.text())