Merge pull request #3433 from BerriAI/litellm_timeout_fix

fix(bedrock.py): convert httpx.timeout to boto3 valid timeout
Krish Dholakia, 2024-05-03 18:53:59 -07:00 (committed by GitHub)
commit 7d2aa2f645
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 146 additions and 89 deletions
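
For context, a minimal sketch of what this change enables (the model name and timeout values below are illustrative, not taken from the diff): an httpx.Timeout passed to litellm.completion is now translated into boto3's connect/read timeouts for Bedrock instead of tripping the old float-only check.

    # Illustrative sketch; assumes AWS credentials and litellm are already configured.
    import httpx
    import litellm

    response = litellm.completion(
        model="bedrock/anthropic.claude-instant-v1",  # illustrative model name
        messages=[{"role": "user", "content": "Hey, how's it going?"}],
        timeout=httpx.Timeout(600.0, connect=5.0),  # read/write/pool=600s, connect=5s
    )
    print(response)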


@@ -151,7 +151,7 @@ class AzureChatCompletion(BaseLLM):
         api_type: str,
         azure_ad_token: str,
         print_verbose: Callable,
-        timeout,
+        timeout: Union[float, httpx.Timeout],
         logging_obj,
         optional_params,
         litellm_params,


@@ -533,7 +533,7 @@ def init_bedrock_client(
     aws_session_name: Optional[str] = None,
     aws_profile_name: Optional[str] = None,
     aws_role_name: Optional[str] = None,
-    timeout: Optional[int] = None,
+    timeout: Optional[Union[float, httpx.Timeout]] = None,
 ):
     # check for custom AWS_REGION_NAME and use it if not passed to init_bedrock_client
     litellm_aws_region_name = get_secret("AWS_REGION_NAME", None)
@@ -592,7 +592,14 @@ def init_bedrock_client(
     import boto3

-    config = boto3.session.Config(connect_timeout=timeout, read_timeout=timeout)
+    if isinstance(timeout, float):
+        config = boto3.session.Config(connect_timeout=timeout, read_timeout=timeout)
+    elif isinstance(timeout, httpx.Timeout):
+        config = boto3.session.Config(
+            connect_timeout=timeout.connect, read_timeout=timeout.read
+        )
+    else:
+        config = boto3.session.Config()

     ### CHECK STS ###
     if aws_role_name is not None and aws_session_name is not None:
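
A rough sketch of how the converted Config would be consumed when the Bedrock client is built; the service name, region, and client call below are assumptions for illustration and are not shown in this hunk:

    # Sketch only: building a boto3 client from an httpx.Timeout.
    import boto3
    import httpx

    timeout = httpx.Timeout(600.0, connect=10.0)
    config = boto3.session.Config(
        connect_timeout=timeout.connect,  # 10.0
        read_timeout=timeout.read,        # 600.0
    )
    client = boto3.client("bedrock-runtime", region_name="us-west-2", config=config)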


@@ -246,7 +246,7 @@ class OpenAIChatCompletion(BaseLLM):
     def completion(
         self,
         model_response: ModelResponse,
-        timeout: float,
+        timeout: Union[float, httpx.Timeout],
         model: Optional[str] = None,
         messages: Optional[list] = None,
         print_verbose: Optional[Callable] = None,
@@ -271,9 +271,12 @@ class OpenAIChatCompletion(BaseLLM):
         if model is None or messages is None:
             raise OpenAIError(status_code=422, message=f"Missing model or messages")

-        if not isinstance(timeout, float):
+        if not isinstance(timeout, float) and not isinstance(
+            timeout, httpx.Timeout
+        ):
             raise OpenAIError(
-                status_code=422, message=f"Timeout needs to be a float"
+                status_code=422,
+                message=f"Timeout needs to be a float or httpx.Timeout",
             )

         if custom_llm_provider != "openai":
@@ -425,7 +428,7 @@ class OpenAIChatCompletion(BaseLLM):
         self,
         data: dict,
         model_response: ModelResponse,
-        timeout: float,
+        timeout: Union[float, httpx.Timeout],
         api_key: Optional[str] = None,
         api_base: Optional[str] = None,
         organization: Optional[str] = None,
@@ -480,7 +483,7 @@ class OpenAIChatCompletion(BaseLLM):
     def streaming(
         self,
         logging_obj,
-        timeout: float,
+        timeout: Union[float, httpx.Timeout],
         data: dict,
         model: str,
         api_key: Optional[str] = None,
@@ -524,7 +527,7 @@ class OpenAIChatCompletion(BaseLLM):
     async def async_streaming(
         self,
         logging_obj,
-        timeout: float,
+        timeout: Union[float, httpx.Timeout],
         data: dict,
         model: str,
         api_key: Optional[str] = None,
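
The relaxed 422 check reflects that the OpenAI v1 Python SDK itself accepts either a float or an httpx.Timeout. A hedged sketch (assumes OPENAI_API_KEY is set; model and values are illustrative):

    import httpx
    from openai import OpenAI

    client = OpenAI(timeout=httpx.Timeout(600.0, connect=5.0))  # per-client default
    resp = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "ping"}],
        timeout=httpx.Timeout(30.0, connect=5.0),  # per-request override
    )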


@@ -39,6 +39,7 @@ from litellm.utils import (
     Usage,
     get_optional_params_embeddings,
     get_optional_params_image_gen,
+    supports_httpx_timeout,
 )
 from .llms import (
     anthropic_text,
@@ -450,7 +451,7 @@ def completion(
     model: str,
     # Optional OpenAI params: see https://platform.openai.com/docs/api-reference/chat/create
     messages: List = [],
-    timeout: Optional[Union[float, int]] = None,
+    timeout: Optional[Union[float, str, httpx.Timeout]] = None,
     temperature: Optional[float] = None,
     top_p: Optional[float] = None,
     n: Optional[int] = None,
@@ -648,11 +649,21 @@ def completion(
     non_default_params = {
         k: v for k, v in kwargs.items() if k not in default_params
     }  # model-specific params - pass them straight to the model/provider
-    if timeout is None:
-        timeout = (
-            kwargs.get("request_timeout", None) or 600
-        )  # set timeout for 10 minutes by default
-    timeout = float(timeout)
+
+    ### TIMEOUT LOGIC ###
+    timeout = timeout or kwargs.get("request_timeout", 600) or 600
+    # set timeout for 10 minutes by default
+
+    if (
+        timeout is not None
+        and isinstance(timeout, httpx.Timeout)
+        and supports_httpx_timeout(custom_llm_provider) == False
+    ):
+        read_timeout = timeout.read or 600
+        timeout = read_timeout  # default 10 min timeout
+    elif timeout is not None and not isinstance(timeout, httpx.Timeout):
+        timeout = float(timeout)  # type: ignore
+
     try:
         if base_url is not None:
             api_base = base_url
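
Sketched standalone (the provider name is illustrative and the helper mirrors the one added to utils.py later in this diff), the fallback reduces an httpx.Timeout to its read timeout for providers that cannot accept the richer object:

    import httpx

    def supports_httpx_timeout(custom_llm_provider: str) -> bool:
        # mirrors the helper introduced in this PR
        return custom_llm_provider in ["openai", "azure", "bedrock"]

    timeout = httpx.Timeout(10.0, connect=60.0)
    if isinstance(timeout, httpx.Timeout) and not supports_httpx_timeout("ollama"):
        timeout = timeout.read or 600  # -> 10.0 for an unsupported provider

    print(timeout)  # 10.0
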
@@ -873,7 +884,7 @@ def completion(
                 logger_fn=logger_fn,
                 logging_obj=logging,
                 acompletion=acompletion,
-                timeout=timeout,
+                timeout=timeout,  # type: ignore
                 client=client,  # pass AsyncAzureOpenAI, AzureOpenAI client
             )
@@ -1014,7 +1025,7 @@ def completion(
                 optional_params=optional_params,
                 litellm_params=litellm_params,
                 logger_fn=logger_fn,
-                timeout=timeout,
+                timeout=timeout,  # type: ignore
                 custom_prompt_dict=custom_prompt_dict,
                 client=client,  # pass AsyncOpenAI, OpenAI client
                 organization=organization,
@@ -1099,7 +1110,7 @@ def completion(
                 optional_params=optional_params,
                 litellm_params=litellm_params,
                 logger_fn=logger_fn,
-                timeout=timeout,
+                timeout=timeout,  # type: ignore
             )

             if (
@@ -1473,7 +1484,7 @@ def completion(
                 acompletion=acompletion,
                 logging_obj=logging,
                 custom_prompt_dict=custom_prompt_dict,
-                timeout=timeout,
+                timeout=timeout,  # type: ignore
             )
             if (
                 "stream" in optional_params
@@ -1566,7 +1577,7 @@ def completion(
                 logger_fn=logger_fn,
                 logging_obj=logging,
                 acompletion=acompletion,
-                timeout=timeout,
+                timeout=timeout,  # type: ignore
             )
             ## LOGGING
             logging.post_call(
@@ -1893,7 +1904,7 @@ def completion(
                 logger_fn=logger_fn,
                 encoding=encoding,
                 logging_obj=logging,
-                timeout=timeout,
+                timeout=timeout,  # type: ignore
             )
             if (
                 "stream" in optional_params


@@ -375,7 +375,9 @@ class Router:
         except Exception as e:
             raise e

-    def _completion(self, model: str, messages: List[Dict[str, str]], **kwargs):
+    def _completion(
+        self, model: str, messages: List[Dict[str, str]], **kwargs
+    ) -> Union[ModelResponse, CustomStreamWrapper]:
         model_name = None
         try:
             # pick the one that is available (lowest TPM/RPM)
@@ -438,7 +440,9 @@ class Router:
             )
             raise e

-    async def acompletion(self, model: str, messages: List[Dict[str, str]], **kwargs):
+    async def acompletion(
+        self, model: str, messages: List[Dict[str, str]], **kwargs
+    ) -> Union[ModelResponse, CustomStreamWrapper]:
         try:
             kwargs["model"] = model
             kwargs["messages"] = messages
@@ -454,7 +458,9 @@ class Router:
         except Exception as e:
             raise e

-    async def _acompletion(self, model: str, messages: List[Dict[str, str]], **kwargs):
+    async def _acompletion(
+        self, model: str, messages: List[Dict[str, str]], **kwargs
+    ) -> Union[ModelResponse, CustomStreamWrapper]:
         """
         - Get an available deployment
         - call it with a semaphore over the call
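
The new annotations make explicit that these methods return either a ModelResponse or, when stream=True, a CustomStreamWrapper. A usage sketch (the model_list entry below is an assumption for illustration):

    from litellm import ModelResponse, Router
    from litellm.utils import CustomStreamWrapper

    router = Router(
        model_list=[
            {"model_name": "gpt-3.5-turbo", "litellm_params": {"model": "gpt-3.5-turbo"}}
        ]
    )
    resp = router.completion(
        model="gpt-3.5-turbo", messages=[{"role": "user", "content": "hi"}]
    )
    assert isinstance(resp, (ModelResponse, CustomStreamWrapper))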


@@ -5,74 +5,59 @@ plugins: timeout-2.2.0, asyncio-0.23.2, anyio-3.7.1, xdist-3.3.1
 asyncio: mode=Mode.STRICT
 collected 1 item

-test_custom_logger.py Chunks have a created at hidden param
-Chunks sorted
-token_counter messages received: [{'role': 'user', 'content': 'write a one sentence poem about: 73348'}]
-Token Counter - using OpenAI token counter, for model=gpt-3.5-turbo
-LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
-Logging Details LiteLLM-Success Call: None
-success callbacks: []
-Token Counter - using OpenAI token counter, for model=gpt-3.5-turbo
-LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
-Logging Details LiteLLM-Success Call streaming complete
-Looking up model=gpt-3.5-turbo in model_cost_map
-Success: model=gpt-3.5-turbo in model_cost_map
-prompt_tokens=17; completion_tokens=0
-Returned custom cost for model=gpt-3.5-turbo - prompt_tokens_cost_usd_dollar: 2.55e-05, completion_tokens_cost_usd_dollar: 0.0
-final cost: 2.55e-05; prompt_tokens_cost_usd_dollar: 2.55e-05; completion_tokens_cost_usd_dollar: 0.0
-. [100%]
+test_image_generation.py . [100%]

 =============================== warnings summary ===============================
-../../../../../../opt/homebrew/lib/python3.11/site-packages/pydantic/_internal/_config.py:284: 18 warnings
+../../../../../../opt/homebrew/lib/python3.11/site-packages/pydantic/_internal/_config.py:284: 23 warnings
   /opt/homebrew/lib/python3.11/site-packages/pydantic/_internal/_config.py:284: PydanticDeprecatedSince20: Support for class-based `config` is deprecated, use ConfigDict instead. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)

-../proxy/_types.py:218
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:218: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:219
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:219: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:305
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:305: PydanticDeprecatedSince20: `pydantic.config.Extra` is deprecated, use literal values instead (e.g. `extra='allow'`). Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:306
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:306: PydanticDeprecatedSince20: `pydantic.config.Extra` is deprecated, use literal values instead (e.g. `extra='allow'`). Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   extra = Extra.allow  # Allow extra fields

-../proxy/_types.py:308
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:308: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:309
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:309: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:337
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:337: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:338
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:338: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:384
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:384: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:385
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:385: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:450
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:450: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:454
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:454: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:462
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:462: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:466
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:466: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:502
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:502: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:509
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:509: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:536
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:536: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:546
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:546: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:823
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:823: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:840
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:840: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:850
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:850: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:867
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:867: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

-../proxy/_types.py:869
-  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:869: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
+../proxy/_types.py:886
+  /Users/krrishdholakia/Documents/litellm/litellm/proxy/_types.py:886: PydanticDeprecatedSince20: Pydantic V1 style `@root_validator` validators are deprecated. You should migrate to Pydantic V2 style `@model_validator` validators, see the migration guide for more details. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
   @root_validator(pre=True)

 ../../../../../../opt/homebrew/lib/python3.11/site-packages/pkg_resources/__init__.py:121
@@ -126,30 +111,33 @@ final cost: 2.55e-05; prompt_tokens_cost_usd_dollar: 2.55e-05; completion_tokens
   Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
   declare_namespace(pkg)

-test_custom_logger.py::test_redis_cache_completion_stream
-  /opt/homebrew/lib/python3.11/site-packages/_pytest/unraisableexception.py:78: PytestUnraisableExceptionWarning: Exception ignored in: <function StreamWriter.__del__ at 0x1019c28e0>
-  Traceback (most recent call last):
-    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/streams.py", line 395, in __del__
-      self.close()
-    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/streams.py", line 343, in close
-      return self._transport.close()
-             ^^^^^^^^^^^^^^^^^^^^^^^
-    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/sslproto.py", line 112, in close
-      self._ssl_protocol._start_shutdown()
-    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/sslproto.py", line 620, in _start_shutdown
-      self._shutdown_timeout_handle = self._loop.call_later(
-                                      ^^^^^^^^^^^^^^^^^^^^^^
-    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/base_events.py", line 727, in call_later
-      timer = self.call_at(self.time() + delay, callback, *args,
-              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/base_events.py", line 740, in call_at
-      self._check_closed()
-    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/asyncio/base_events.py", line 519, in _check_closed
-      raise RuntimeError('Event loop is closed')
-  RuntimeError: Event loop is closed
-  warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))
+test_image_generation.py::test_aimage_generation_bedrock_with_optional_params
+  /opt/homebrew/lib/python3.11/site-packages/_pytest/threadexception.py:73: PytestUnhandledThreadExceptionWarning: Exception in thread Thread-1 (success_handler)
+  Traceback (most recent call last):
+    File "/Users/krrishdholakia/Documents/litellm/litellm/utils.py", line 1412, in _success_handler_helper_fn
+      litellm.completion_cost(
+    File "/Users/krrishdholakia/Documents/litellm/litellm/utils.py", line 4442, in completion_cost
+      raise e
+    File "/Users/krrishdholakia/Documents/litellm/litellm/utils.py", line 4405, in completion_cost
+      raise Exception(
+  Exception: Model=1024-x-1024/stability.stable-diffusion-xl-v1 not found in completion cost model map
+
+  During handling of the above exception, another exception occurred:
+
+  Traceback (most recent call last):
+    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/threading.py", line 1045, in _bootstrap_inner
+      self.run()
+    File "/opt/homebrew/Cellar/python@3.11/3.11.6_1/Frameworks/Python.framework/Versions/3.11/lib/python3.11/threading.py", line 982, in run
+      self._target(*self._args, **self._kwargs)
+    File "/Users/krrishdholakia/Documents/litellm/litellm/utils.py", line 1465, in success_handler
+      start_time, end_time, result = self._success_handler_helper_fn(
+                                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+    File "/Users/krrishdholakia/Documents/litellm/litellm/utils.py", line 1459, in _success_handler_helper_fn
+      raise Exception(f"[Non-Blocking] LiteLLM.Success_Call Error: {str(e)}")
+  Exception: [Non-Blocking] LiteLLM.Success_Call Error: Model=1024-x-1024/stability.stable-diffusion-xl-v1 not found in completion cost model map
+  warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))

 -- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
-======================== 1 passed, 56 warnings in 2.43s ========================
+======================== 1 passed, 61 warnings in 3.00s ========================


@@ -10,7 +10,37 @@ sys.path.insert(
 import time
 import litellm
 import openai
-import pytest, uuid
+import pytest, uuid, httpx
+
+
+@pytest.mark.parametrize(
+    "model, provider",
+    [
+        ("gpt-3.5-turbo", "openai"),
+        ("anthropic.claude-instant-v1", "bedrock"),
+        ("azure/chatgpt-v-2", "azure"),
+    ],
+)
+@pytest.mark.parametrize("sync_mode", [True, False])
+@pytest.mark.asyncio
+async def test_httpx_timeout(model, provider, sync_mode):
+    """
+    Test if setting httpx.timeout works for completion calls
+    """
+    timeout_val = httpx.Timeout(10.0, connect=60.0)
+
+    messages = [{"role": "user", "content": "Hey, how's it going?"}]
+
+    if sync_mode:
+        response = litellm.completion(
+            model=model, messages=messages, timeout=timeout_val
+        )
+    else:
+        response = await litellm.acompletion(
+            model=model, messages=messages, timeout=timeout_val
+        )
+
+    print(f"response: {response}")
+
+
 def test_timeout():
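
For reference, the timeout object used in the new test splits into per-phase limits; the positional value is the default for read, write, and pool, while connect is overridden separately:

    import httpx

    t = httpx.Timeout(10.0, connect=60.0)
    print(t.connect, t.read, t.write, t.pool)  # 60.0 10.0 10.0 10.0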


@@ -4453,7 +4453,19 @@ def completion_cost(
         raise e


-def supports_function_calling(model: str):
+def supports_httpx_timeout(custom_llm_provider: str) -> bool:
+    """
+    Helper function to know if a provider implementation supports httpx timeout
+    """
+    supported_providers = ["openai", "azure", "bedrock"]
+
+    if custom_llm_provider in supported_providers:
+        return True
+
+    return False
+
+
+def supports_function_calling(model: str) -> bool:
     """
     Check if the given model supports function calling and return a boolean value.
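
A quick usage sketch of the new helper, imported from litellm.utils as main.py does earlier in this diff:

    from litellm.utils import supports_httpx_timeout

    assert supports_httpx_timeout("bedrock") is True
    assert supports_httpx_timeout("ollama") is False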