Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
rename get_async_httpx_client
This commit is contained in:
parent 1e8cf9f2a6
commit d4b9a1307d
13 changed files with 27 additions and 27 deletions
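The change itself is a mechanical rename: the async HTTP client factory in litellm.llms.custom_httpx.http_handler loses its leading underscore, so every import and call site moves from _get_async_httpx_client to get_async_httpx_client. A minimal before/after sketch of a call site (the zero-argument call and the module path come straight from the diff below; treating the return value as an AsyncHTTPHandler is an assumption):

    # Before this commit: underscore-prefixed, "private"-looking helper.
    from litellm.llms.custom_httpx.http_handler import _get_async_httpx_client

    client = _get_async_httpx_client()  # assumed to return an AsyncHTTPHandler

    # After this commit: same helper under its public-style name.
    from litellm.llms.custom_httpx.http_handler import get_async_httpx_client

    client = get_async_httpx_client()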
@@ -19,8 +19,8 @@ from litellm.litellm_core_utils.asyncify import asyncify
 from litellm.llms.custom_httpx.http_handler import (
     AsyncHTTPHandler,
     HTTPHandler,
-    _get_async_httpx_client,
     _get_httpx_client,
+    get_async_httpx_client,
 )
 from litellm.types.llms.openai import (
     ChatCompletionToolCallChunk,
@@ -566,7 +566,7 @@ class SagemakerLLM(BaseAWSLLM):
         try:
             if client is None:
                 client = (
-                    _get_async_httpx_client()
+                    get_async_httpx_client()
                 )  # Create a new client if none provided
             response = await client.post(
                 api_base,
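The context lines in this hunk show SagemakerLLM's optional-client pattern: reuse a handler the caller passed in, otherwise build a fresh one with the renamed helper. A condensed sketch of that pattern under assumed names (the wrapper function and its headers/data parameters are hypothetical; the client.post call with a URL mirrors what is visible in the context lines):

    from litellm.llms.custom_httpx.http_handler import get_async_httpx_client

    async def _post_with_optional_client(client, api_base, headers, data):
        # Reuse the caller's async handler when provided; otherwise create one
        # with the helper renamed in this commit.
        if client is None:
            client = get_async_httpx_client()
        return await client.post(api_base, headers=headers, data=data)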
@@ -673,7 +673,7 @@ class SagemakerLLM(BaseAWSLLM):
         model_id: Optional[str],
     ):
         timeout = 300.0
-        async_handler = _get_async_httpx_client()
+        async_handler = get_async_httpx_client()

         async_transform_prompt = asyncify(self._transform_prompt)

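The second call site, at line 673, builds its handler the same way and also wraps the synchronous prompt transform with asyncify, which is imported in the first hunk's context line. A rough, self-contained sketch of that wrap-then-await pattern (the example function and its argument are stand-ins; the diff only confirms asyncify(self._transform_prompt) being assigned to a wrapper, so awaiting the wrapper afterwards is an assumption):

    import asyncio
    import time

    from litellm.litellm_core_utils.asyncify import asyncify


    def _slow_transform(prompt: str) -> str:
        # Stand-in for SagemakerLLM._transform_prompt: blocking work.
        time.sleep(0.1)
        return prompt.upper()


    async def main() -> None:
        async_transform = asyncify(_slow_transform)  # sync fn -> awaitable wrapper
        result = await async_transform("hello sagemaker")
        print(result)


    asyncio.run(main())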