fix make_async_azure_httpx_request
This commit is contained in:
parent
89d76d1eb7
commit
d4dc8e60b6
2 changed files with 10 additions and 2 deletions
@@ -771,6 +771,7 @@ jobs:
       - run: python ./tests/code_coverage_tests/litellm_logging_code_coverage.py
       - run: python ./tests/documentation_tests/test_env_keys.py
       - run: python ./tests/documentation_tests/test_api_docs.py
+      - run: python ./tests/code_coverage_tests/ensure_async_clients_test.py
       - run: helm lint ./deploy/charts/litellm-helm

   db_migration_disable_update_check:
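The line added above wires a new check, ensure_async_clients_test.py, into CI alongside the existing documentation and coverage scripts. As a rough illustration only (this is not the repository's actual test; the directory, allow-list, and pattern are assumptions), such a check could walk the package and fail the build when it finds direct AsyncHTTPHandler construction that should go through get_async_httpx_client instead:

# Hypothetical sketch, not the repository's actual ensure_async_clients_test.py:
# scan the package for direct AsyncHTTPHandler construction, which should go
# through get_async_httpx_client (as the Azure fix below does).
import os
import re
import sys

SOURCE_DIR = "./litellm"              # assumption: package root relative to the repo
ALLOWED_FILES = {"http_handler.py"}   # the handler module itself may construct clients
PATTERN = re.compile(r"AsyncHTTPHandler\s*\(")


def find_direct_instantiations(source_dir: str) -> list:
    """Return 'path:line: code' entries for every direct AsyncHTTPHandler(...) call."""
    violations = []
    for root, _dirs, files in os.walk(source_dir):
        for name in files:
            if not name.endswith(".py") or name in ALLOWED_FILES:
                continue
            path = os.path.join(root, name)
            with open(path, "r", encoding="utf-8") as fh:
                for lineno, line in enumerate(fh, start=1):
                    if PATTERN.search(line):
                        violations.append(f"{path}:{lineno}: {line.strip()}")
    return violations


if __name__ == "__main__":
    found = find_direct_instantiations(SOURCE_DIR)
    if found:
        print("Direct AsyncHTTPHandler construction found; use get_async_httpx_client:")
        print("\n".join(found))
        sys.exit(1)
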
@@ -12,7 +12,11 @@ from typing_extensions import overload
 import litellm
 from litellm.caching.caching import DualCache
 from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
-from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler
+from litellm.llms.custom_httpx.http_handler import (
+    AsyncHTTPHandler,
+    HTTPHandler,
+    get_async_httpx_client,
+)
 from litellm.types.utils import EmbeddingResponse
 from litellm.utils import (
     CustomStreamWrapper,
@@ -977,7 +981,10 @@ class AzureChatCompletion(BaseLLM):
             else:
                 _params["timeout"] = httpx.Timeout(timeout=600.0, connect=5.0)

-            async_handler = AsyncHTTPHandler(**_params)  # type: ignore
+            async_handler = get_async_httpx_client(
+                llm_provider=litellm.LlmProviders.AZURE,
+                params=_params,
+            )
         else:
             async_handler = client  # type: ignore

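The change above replaces a per-call AsyncHTTPHandler(**_params) with the shared factory get_async_httpx_client, passing the Azure provider enum and the prepared params. The apparent intent is client reuse: constructing a new handler on every request discards httpx's connection pool, while a cached, provider-keyed client keeps connections warm across calls. A minimal sketch of that reuse pattern follows (illustrative only, not litellm's implementation; the class and function names are made up):

# Illustrative caching pattern, assuming one shared httpx client per provider.
from typing import Dict, Optional

import httpx


class PooledAsyncClient:
    """Stand-in for litellm's AsyncHTTPHandler: a thin wrapper around httpx.AsyncClient."""

    def __init__(self, timeout: Optional[httpx.Timeout] = None) -> None:
        self.client = httpx.AsyncClient(timeout=timeout)

    async def post(self, url: str, **kwargs):
        return await self.client.post(url, **kwargs)


_client_cache: Dict[str, PooledAsyncClient] = {}


def get_cached_async_client(
    provider: str, timeout: Optional[httpx.Timeout] = None
) -> PooledAsyncClient:
    # Reuse one client per provider so connections stay pooled across requests,
    # instead of paying connection setup on every call.
    if provider not in _client_cache:
        _client_cache[provider] = PooledAsyncClient(timeout=timeout)
    return _client_cache[provider]


# Usage mirroring the diff: the Azure path asks for the shared client rather
# than constructing a new handler from _params each time.
azure_client = get_cached_async_client(
    "azure", timeout=httpx.Timeout(timeout=600.0, connect=5.0)
)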