From e981476ffe22e77a9f01b851545cd98db175be7f Mon Sep 17 00:00:00 2001
From: Tim O'Farrell
Date: Fri, 21 Feb 2025 14:35:08 +0000
Subject: [PATCH 1/3] Fix leaking file descriptors

Creating HTTP clients on the fly and not closing them causes file
descriptors to leak. The garbage collector can sometimes reclaim them,
but not reliably.
---
 litellm/llms/anthropic/chat/handler.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/litellm/llms/anthropic/chat/handler.py b/litellm/llms/anthropic/chat/handler.py
index c58aa00a10..7e412b36a6 100644
--- a/litellm/llms/anthropic/chat/handler.py
+++ b/litellm/llms/anthropic/chat/handler.py
@@ -403,10 +403,10 @@ class AnthropicChatCompletion(BaseLLM):
             )
 
         else:
+            _close_client = False
             if client is None or not isinstance(client, HTTPHandler):
                 client = HTTPHandler(timeout=timeout)  # type: ignore
-            else:
-                client = client
+                _close_client = True
 
             try:
                 response = client.post(
@@ -429,6 +429,9 @@ class AnthropicChatCompletion(BaseLLM):
                     status_code=status_code,
                     headers=error_headers,
                 )
+            finally:
+                if _close_client:
+                    client.close()
 
         return config.transform_response(
             model=model,

From d5215d428f7e41104b1b6ca5dabb8654ce5d54df Mon Sep 17 00:00:00 2001
From: Tim O'Farrell
Date: Fri, 21 Feb 2025 16:59:02 +0000
Subject: [PATCH 2/3] Use module-level client

---
 litellm/llms/anthropic/chat/handler.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/litellm/llms/anthropic/chat/handler.py b/litellm/llms/anthropic/chat/handler.py
index 7e412b36a6..00629fc8ba 100644
--- a/litellm/llms/anthropic/chat/handler.py
+++ b/litellm/llms/anthropic/chat/handler.py
@@ -403,10 +403,8 @@ class AnthropicChatCompletion(BaseLLM):
             )
 
         else:
-            _close_client = False
             if client is None or not isinstance(client, HTTPHandler):
-                client = HTTPHandler(timeout=timeout)  # type: ignore
-                _close_client = True
+                client = litellm.module_level_client
 
             try:
                 response = client.post(
@@ -429,9 +429,6 @@ class AnthropicChatCompletion(BaseLLM):
                     status_code=status_code,
                     headers=error_headers,
                 )
-            finally:
-                if _close_client:
-                    client.close()
 
         return config.transform_response(
             model=model,

From 5047741677d964b9692cfcf2e60ab46d10945cd5 Mon Sep 17 00:00:00 2001
From: Tim O'Farrell
Date: Mon, 24 Feb 2025 08:57:47 +0000
Subject: [PATCH 3/3] Use _get_httpx_client() as suggested in review

---
 litellm/llms/anthropic/chat/handler.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/litellm/llms/anthropic/chat/handler.py b/litellm/llms/anthropic/chat/handler.py
index 00629fc8ba..22e255a7a0 100644
--- a/litellm/llms/anthropic/chat/handler.py
+++ b/litellm/llms/anthropic/chat/handler.py
@@ -19,6 +19,7 @@ from litellm.llms.custom_httpx.http_handler import (
     AsyncHTTPHandler,
     HTTPHandler,
     get_async_httpx_client,
+    _get_httpx_client,
 )
 from litellm.types.llms.anthropic import (
     AnthropicChatCompletionUsageBlock,
@@ -404,7 +405,7 @@ class AnthropicChatCompletion(BaseLLM):
 
         else:
             if client is None or not isinstance(client, HTTPHandler):
-                client = litellm.module_level_client
+                client = _get_httpx_client()
 
             try:
                 response = client.post(
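
For reviewers following the series: PATCH 1/3 applies a "close what you create" ownership rule. Below is a minimal, self-contained sketch of that pattern outside the handler — `make_request`, the URL, and the timeout value are hypothetical; `HTTPHandler` and its `post`/`close` methods are the ones from `litellm.llms.custom_httpx.http_handler` that the patch itself uses.

```python
from typing import Optional

from litellm.llms.custom_httpx.http_handler import HTTPHandler


def make_request(url: str, client: Optional[HTTPHandler] = None):
    # Hypothetical helper mirroring PATCH 1/3: only close a client that
    # this function created itself.
    _close_client = False
    if client is None or not isinstance(client, HTTPHandler):
        # No usable client was supplied, so create one. Each HTTPHandler
        # wraps an httpx.Client whose pooled sockets hold file descriptors
        # open until the client is closed or garbage-collected.
        client = HTTPHandler(timeout=600.0)
        _close_client = True
    try:
        return client.post(url, json={})
    finally:
        # A caller-supplied client stays open; the caller owns its lifecycle.
        if _close_client:
            client.close()
```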
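
Patches 2/3 and 3/3 then drop per-call construction entirely in favor of a shared, process-wide client. A rough sketch of why that removes the leak, using plain `httpx` as an illustrative stand-in rather than litellm's actual `module_level_client` / `_get_httpx_client()` internals:

```python
import httpx

# Created once at import time and reused for every request, so no new
# sockets (file descriptors) are opened per call; httpx's keep-alive pool
# recycles connections instead. This is a stand-in for the shared client
# the series converges on, not litellm's real implementation.
_shared_client = httpx.Client(timeout=600.0)


def post(url: str, payload: dict) -> httpx.Response:
    # Reuse the shared connection pool instead of building a client on
    # the fly, which is what leaked descriptors in the original code.
    return _shared_client.post(url, json=payload)
```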