mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
All checks were successful
Read Version from pyproject.toml / read-version (push) Successful in 13s
* fix(http_handler.py): support passing ssl verify dynamically and using the correct httpx client based on passed ssl verify param
Fixes https://github.com/BerriAI/litellm/issues/6499
* feat(llm_http_handler.py): support passing `ssl_verify=False` dynamically in call args
Closes https://github.com/BerriAI/litellm/issues/6499
* fix(proxy/utils.py): prevent bad logs from breaking all cost tracking + reset list regardless of success/failure
prevents malformed logs from causing all spend tracking to break since they're constantly retried
* test(test_proxy_utils.py): add test to ensure bad log is dropped
* test(test_proxy_utils.py): ensure in-memory spend logs reset after bad log error
* test(test_user_api_key_auth.py): add unit test to ensure end user id as str works
* fix(auth_utils.py): ensure extracted end user id is always a str
prevents db cost tracking errors
* test(test_auth_utils.py): ensure get end user id from request body always returns a string
* test: update tests
* test: skip bedrock test - behaviour now supported
* test: fix testing
* refactor(spend_tracking_utils.py): reduce size of get_logging_payload
* test: fix test
* bump: version 1.59.4 → 1.59.5
* Revert "bump: version 1.59.4 → 1.59.5"
This reverts commit 1182b46b2e.
* fix(utils.py): fix spend logs retry logic
* fix(spend_tracking_utils.py): fix get tags
* fix(spend_tracking_utils.py): fix end user id spend tracking on pass-through endpoints
49 lines
1.3 KiB
Python
49 lines
1.3 KiB
Python
from typing import Optional, Union
|
|
|
|
import httpx
|
|
|
|
try:
|
|
from litellm._version import version
|
|
except Exception:
|
|
version = "0.0.0"
|
|
|
|
headers = {
|
|
"User-Agent": f"litellm/{version}",
|
|
}
|
|
|
|
|
|
class HTTPHandler:
|
|
def __init__(self, concurrent_limit=1000):
|
|
# Create a client with a connection pool
|
|
self.client = httpx.AsyncClient(
|
|
limits=httpx.Limits(
|
|
max_connections=concurrent_limit,
|
|
max_keepalive_connections=concurrent_limit,
|
|
),
|
|
headers=headers,
|
|
)
|
|
|
|
async def close(self):
|
|
# Close the client when you're done with it
|
|
await self.client.aclose()
|
|
|
|
async def get(
|
|
self, url: str, params: Optional[dict] = None, headers: Optional[dict] = None
|
|
):
|
|
response = await self.client.get(url, params=params, headers=headers)
|
|
return response
|
|
|
|
async def post(
|
|
self,
|
|
url: str,
|
|
data: Optional[Union[dict, str]] = None,
|
|
params: Optional[dict] = None,
|
|
headers: Optional[dict] = None,
|
|
):
|
|
try:
|
|
response = await self.client.post(
|
|
url, data=data, params=params, headers=headers # type: ignore
|
|
)
|
|
return response
|
|
except Exception as e:
|
|
raise e
|