Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 19:24:27 +00:00
fix(azure.py): fix httpx call for dall e 3
This commit is contained in:
parent 540db53ccb
commit ac00639301
2 changed files with 19 additions and 5 deletions
@@ -1150,7 +1150,13 @@ class AzureChatCompletion(BaseLLM):
                 error_data = response.json()
                 raise AzureOpenAIError(status_code=400, message=json.dumps(error_data))
 
-            return response
+            result = response.json()["result"]
+            return httpx.Response(
+                status_code=200,
+                headers=response.headers,
+                content=json.dumps(result).encode("utf-8"),
+                request=httpx.Request(method="POST", url="https://api.openai.com/v1"),
+            )
         return await async_handler.post(
             url=api_base,
             json=data,
@@ -1249,7 +1255,13 @@ class AzureChatCompletion(BaseLLM):
                 error_data = response.json()
                 raise AzureOpenAIError(status_code=400, message=json.dumps(error_data))
 
-            return response
+            result = response.json()["result"]
+            return httpx.Response(
+                status_code=200,
+                headers=response.headers,
+                content=json.dumps(result).encode("utf-8"),
+                request=httpx.Request(method="POST", url="https://api.openai.com/v1"),
+            )
         return sync_handler.post(
             url=api_base,
             json=data,
@@ -1324,7 +1336,7 @@ class AzureChatCompletion(BaseLLM):
             api_key=api_key,
             data=data,
         )
-        response = httpx_response.json()["result"]
+        response = httpx_response.json()
 
         stringified_response = response
         ## LOGGING
@@ -1431,7 +1443,7 @@ class AzureChatCompletion(BaseLLM):
             api_key=api_key or "",
             data=data,
         )
-        response = httpx_response.json()["result"]
+        response = httpx_response.json()
 
         ## LOGGING
         logging_obj.post_call(
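In both the async and sync DALL-E 3 polling paths above, the fix stops returning the raw Azure polling response, whose JSON nests the OpenAI-style image payload under a "result" key, and instead wraps only that inner object in a synthetic httpx.Response. Callers can then use .json() on the returned object as if it came straight from the images endpoint, which is why the two later hunks drop their ["result"] indexing. A minimal sketch of the pattern, assuming an illustrative helper name wrap_azure_dalle_result (not part of litellm):

import json

import httpx


def wrap_azure_dalle_result(poll_response: httpx.Response) -> httpx.Response:
    # Azure's image-generation job nests the OpenAI-style payload under "result".
    result = poll_response.json()["result"]

    # Build a synthetic 200 response whose body is only that inner object, so
    # downstream code can call .json() without knowing about the "result" wrapper.
    return httpx.Response(
        status_code=200,
        headers=poll_response.headers,
        content=json.dumps(result).encode("utf-8"),
        request=httpx.Request(method="POST", url="https://api.openai.com/v1"),
    )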
@@ -44,7 +44,9 @@ def test_image_generation_openai():
 
 @pytest.mark.parametrize(
     "sync_mode",
-    [True, False],
+    [
+        True,
+    ], # False
 ) #
 @pytest.mark.asyncio
 async def test_image_generation_azure(sync_mode):
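The test hunk above narrows the sync_mode parametrization to the synchronous path, leaving False commented out. For context, a test parametrized this way typically branches on sync_mode inside a single async test body; the sketch below is purely illustrative, with stub helpers (image_gen_sync, image_gen_async) standing in for the real image-generation calls, and assumes pytest plus pytest-asyncio are installed:

import pytest


def image_gen_sync(prompt: str) -> dict:
    # Hypothetical stand-in for a synchronous image-generation call.
    return {"data": [{"url": "https://example.com/image.png"}]}


async def image_gen_async(prompt: str) -> dict:
    # Hypothetical stand-in for an asynchronous image-generation call.
    return {"data": [{"url": "https://example.com/image.png"}]}


@pytest.mark.parametrize("sync_mode", [True])  # False: async path left disabled, as in the diff
@pytest.mark.asyncio
async def test_image_generation_pattern(sync_mode):
    # One async test covers both paths by branching on the sync_mode parameter.
    if sync_mode:
        response = image_gen_sync("a sunrise over mountains")
    else:
        response = await image_gen_async("a sunrise over mountains")
    assert len(response["data"]) > 0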