fix anthropic use get_async_httpx_client

Ishaan Jaff 2024-11-21 10:18:26 -08:00
parent fb5cc97387
commit 6af0494483
2 changed files with 17 additions and 4 deletions
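
Summary of the change: the per-call `AsyncHTTPHandler(...)` construction is replaced by litellm's `get_async_httpx_client` helper, tagged with the calling provider. A minimal sketch of the new call pattern, using only names that appear in the diff below (the surrounding context is illustrative, not the exact file contents):

```python
import httpx
import litellm
from litellm.llms.custom_httpx.http_handler import get_async_httpx_client

# Old pattern (removed in the hunks below):
#   client = AsyncHTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0))
# New pattern: obtain the async client through the shared helper, keyed by provider.
client = get_async_httpx_client(
    llm_provider=litellm.LlmProviders.ANTHROPIC,
    params={"timeout": httpx.Timeout(timeout=600.0, connect=5.0)},
)
```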


@@ -13,7 +13,11 @@ import httpx
 import requests
 import litellm
-from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler
+from litellm.llms.custom_httpx.http_handler import (
+    AsyncHTTPHandler,
+    HTTPHandler,
+    get_async_httpx_client,
+)
 from litellm.utils import CustomStreamWrapper, ModelResponse, Usage
 from ..base import BaseLLM
@@ -162,7 +166,10 @@ class AnthropicTextCompletion(BaseLLM):
         client=None,
     ):
         if client is None:
-            client = AsyncHTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0))
+            client = get_async_httpx_client(
+                llm_provider=litellm.LlmProviders.ANTHROPIC,
+                params={"timeout": httpx.Timeout(timeout=600.0, connect=5.0)},
+            )
         response = await client.post(api_base, headers=headers, data=json.dumps(data))
@@ -198,7 +205,10 @@ class AnthropicTextCompletion(BaseLLM):
         client=None,
     ):
         if client is None:
-            client = AsyncHTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0))
+            client = get_async_httpx_client(
+                llm_provider=litellm.LlmProviders.ANTHROPIC,
+                params={"timeout": httpx.Timeout(timeout=600.0, connect=5.0)},
+            )
         response = await client.post(api_base, headers=headers, data=json.dumps(data))


@@ -74,7 +74,10 @@ class AzureAIEmbedding(OpenAIChatCompletion):
         client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None,
     ) -> EmbeddingResponse:
         if client is None or not isinstance(client, AsyncHTTPHandler):
-            client = AsyncHTTPHandler(timeout=timeout, concurrent_limit=1)
+            client = get_async_httpx_client(
+                llm_provider=litellm.LlmProviders.AZURE_AI,
+                params={"timeout": timeout},
+            )
         url = "{}/images/embeddings".format(api_base)