LiteLLM Minor Fixes & Improvements (10/16/2024) (#6265)

* fix(caching_handler.py): handle positional arguments in add cache logic

Fixes https://github.com/BerriAI/litellm/issues/6264
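
A minimal sketch of the idea behind this fix (hypothetical helper names, not the actual `caching_handler.py` code): normalize positional arguments onto their keyword names before building the cache key, so positional and keyword call styles hit the same cache entry.

```python
import hashlib
import json
from typing import Any, Dict, Tuple

# Hypothetical sketch only -- the real fix lives in litellm's caching_handler.py.
COMPLETION_POSITIONAL_ARGS = ("model", "messages")


def normalize_call_args(args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]:
    """Map positional arguments onto their keyword names so positional and
    keyword call styles produce the same cache entry."""
    normalized = dict(kwargs)
    for name, value in zip(COMPLETION_POSITIONAL_ARGS, args):
        normalized.setdefault(name, value)
    return normalized


def make_cache_key(args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> str:
    payload = json.dumps(normalize_call_args(args, kwargs), sort_keys=True, default=str)
    return hashlib.sha256(payload.encode()).hexdigest()


# Both call styles now map to the same key.
messages = [{"role": "user", "content": "hi"}]
assert make_cache_key(("gpt-3.5-turbo", messages), {}) == make_cache_key(
    (), {"model": "gpt-3.5-turbo", "messages": messages}
)
```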

* feat(litellm_pre_call_utils.py): allow forwarding openai org id to backend client

https://github.com/BerriAI/litellm/issues/6237

* docs(configs.md): add 'forward_openai_org_id' to docs
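
A rough sketch of how the setting might be enabled on the proxy; the key name matches the `general_settings.get("forward_openai_org_id")` check in the diff below, but the exact placement shown in `configs.md` is an assumption.

```yaml
# Sketch of a LiteLLM proxy config -- key name taken from the diff below;
# surrounding structure is assumed.
general_settings:
  forward_openai_org_id: true  # forward the client's OpenAI-Organization header upstream
```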

* fix(proxy_server.py): return model info if user_model is set

Fixes https://github.com/BerriAI/litellm/issues/6233

* fix(hosted_vllm/chat/transformation.py): don't set tools unless non-none
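
The gist of this fix, as a hedged sketch (illustrative function, not the actual transformation code): only include a key such as `tools` in the outgoing request when the caller actually supplied a value, so the backend never receives `tools: null`.

```python
from typing import Any, Dict

# Illustrative sketch only -- not the actual hosted_vllm/chat/transformation.py code.
def map_params_sketch(
    non_default_params: Dict[str, Any], optional_params: Dict[str, Any]
) -> Dict[str, Any]:
    """Copy supported params, skipping keys whose value is None so that
    e.g. `tools=None` is never sent to the hosted vLLM backend."""
    for key, value in non_default_params.items():
        if value is None:
            continue  # don't set `tools` (or anything else) unless it is non-None
        optional_params[key] = value
    return optional_params


print(map_params_sketch({"temperature": 0.2, "tools": None}, {}))
# -> {'temperature': 0.2}
```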

* fix(openai.py): improve debug log for openai 'str' error

Addresses https://github.com/BerriAI/litellm/issues/6272

* fix(proxy_server.py): fix linting error

* fix(proxy_server.py): fix linting errors

* test: skip WIP test

* docs(openai.md): add docs on passing openai org id from client to openai
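
A hedged usage sketch of the client side (the proxy URL and API key are placeholders): the OpenAI Python SDK sends its `organization` setting as the `OpenAI-Organization` header, which `get_openai_org_id_from_headers` in the diff below picks up and forwards when `forward_openai_org_id` is enabled on the proxy.

```python
from openai import OpenAI

# Placeholder base_url and api_key for a locally running LiteLLM proxy.
client = OpenAI(
    base_url="http://localhost:4000",
    api_key="sk-1234",
    organization="org-your-openai-org-id",  # sent as the OpenAI-Organization header
)

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)
```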
Commit 38a9a106d2 (parent 43878bd2a0) by Krish Dholakia, committed via GitHub on 2024-10-16 22:16:23 -07:00.
14 changed files with 371 additions and 47 deletions.

litellm_pre_call_utils.py

@@ -9,6 +9,7 @@ from litellm._logging import verbose_logger, verbose_proxy_logger
from litellm.proxy._types import (
    AddTeamCallback,
    CommonProxyErrors,
    LitellmDataForBackendLLMCall,
    LiteLLMRoutes,
    SpecialHeaders,
    TeamCallbackMetadata,
@@ -172,9 +173,44 @@ def get_forwardable_headers(
            "x-stainless"
        ):  # causes openai sdk to fail
            forwarded_headers[header] = value

    return forwarded_headers


def get_openai_org_id_from_headers(
    headers: dict, general_settings: Optional[Dict] = None
) -> Optional[str]:
    """
    Get the OpenAI Org ID from the headers.
    """
    if (
        general_settings is not None
        and general_settings.get("forward_openai_org_id") is not True
    ):
        return None
    for header, value in headers.items():
        if header.lower() == "openai-organization":
            return value
    return None


def add_litellm_data_for_backend_llm_call(
    headers: dict, general_settings: Optional[Dict[str, Any]] = None
) -> LitellmDataForBackendLLMCall:
    """
    - Adds forwardable headers
    - Adds org id
    """
    data = LitellmDataForBackendLLMCall()
    _headers = get_forwardable_headers(headers)
    if _headers != {}:
        data["headers"] = _headers
    _organization = get_openai_org_id_from_headers(headers, general_settings)
    if _organization is not None:
        data["organization"] = _organization
    return data


async def add_litellm_data_to_request(
    data: dict,
    request: Request,
@@ -210,8 +246,8 @@ async def add_litellm_data_to_request(
        ),
    )

    if get_forwardable_headers(_headers) != {}:
        data["headers"] = get_forwardable_headers(_headers)
    data.update(add_litellm_data_for_backend_llm_call(_headers, general_settings))

    # Include original request and headers in the data
    data["proxy_server_request"] = {
        "url": str(request.url),