Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)
(Bug fix) - allow using Assistants GET, DELETE on /openai pass through routes (#8818)

* test_openai_assistants_e2e_operations
* test openai assistants pass through
* fix GET request on pass through handler
* _make_non_streaming_http_request
* _is_assistants_api_request
* test_openai_assistants_e2e_operations
* test_openai_assistants_e2e_operations
* openai_proxy_route
* docs openai pass through
* docs openai pass through
* docs openai pass through
* test pass through handler
* Potential fix for code scanning alert no. 2240: Incomplete URL substring sanitization

Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
parent 142276b468
commit 11fd5094c7
8 changed files with 572 additions and 84 deletions
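With this fix, read and delete calls against the Assistants API can be routed through the proxy's /openai pass-through endpoint. A minimal client-side sketch, assuming a LiteLLM proxy listening at http://localhost:4000 with a placeholder virtual key (base URL, key, and model below are assumptions, not part of this commit):

from openai import OpenAI

# Point the official OpenAI SDK at the LiteLLM /openai pass-through route.
client = OpenAI(base_url="http://localhost:4000/openai", api_key="sk-1234")

assistant = client.beta.assistants.create(
    model="gpt-4o-mini",
    instructions="You are a helpful assistant.",
)
fetched = client.beta.assistants.retrieve(assistant.id)  # GET /v1/assistants/{id}
deleted = client.beta.assistants.delete(assistant.id)    # DELETE /v1/assistants/{id}
print(fetched.id, deleted.deleted)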
@@ -14,6 +14,7 @@ from fastapi import APIRouter, Depends, HTTPException, Request, Response
 import litellm
 from litellm.constants import BEDROCK_AGENT_RUNTIME_PASS_THROUGH_ROUTES
 from litellm.proxy._types import *
+from litellm.proxy.auth.route_checks import RouteChecks
 from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
 from litellm.proxy.pass_through_endpoints.pass_through_endpoints import (
     create_pass_through_route,
@@ -405,7 +406,7 @@ async def azure_proxy_route(
             "Required 'AZURE_API_KEY' in environment to make pass-through calls to Azure."
         )

-    return await _base_openai_pass_through_handler(
+    return await BaseOpenAIPassThroughHandler._base_openai_pass_through_handler(
         endpoint=endpoint,
         request=request,
         fastapi_response=fastapi_response,
@@ -431,7 +432,7 @@ async def openai_proxy_route(


     """
-    base_target_url = "https://api.openai.com"
+    base_target_url = "https://api.openai.com/"
     # Add or update query parameters
     openai_api_key = passthrough_endpoint_router.get_credentials(
         custom_llm_provider="openai",
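The trailing slash added to base_target_url is harmless because the new handler class (added further down) normalizes the canonical OpenAI host before use. A small sketch of that normalization, restating _append_v1_to_openai_passthrough_url for illustration:

def append_v1(base_url: str) -> str:
    # Restates _append_v1_to_openai_passthrough_url: only the canonical
    # OpenAI host gets "/v1" appended; custom bases pass through untouched.
    if base_url.rstrip("/") == "https://api.openai.com":
        return "https://api.openai.com/v1"
    return base_url

assert append_v1("https://api.openai.com") == "https://api.openai.com/v1"
assert append_v1("https://api.openai.com/") == "https://api.openai.com/v1"
assert append_v1("https://my-gateway.example.com/v1") == "https://my-gateway.example.com/v1"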
@@ -442,7 +443,7 @@ async def openai_proxy_route(
             "Required 'OPENAI_API_KEY' in environment to make pass-through calls to OpenAI."
         )

-    return await _base_openai_pass_through_handler(
+    return await BaseOpenAIPassThroughHandler._base_openai_pass_through_handler(
         endpoint=endpoint,
         request=request,
         fastapi_response=fastapi_response,
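Assistants API calls additionally need the `OpenAI-Beta: assistants=v2` header, which the new handler attaches via RouteChecks._is_assistants_api_request. A hedged sketch of the equivalent header assembly, assuming the route check simply inspects the request path (the real logic lives in RouteChecks and may differ):

from starlette.requests import Request

def assemble_headers(api_key: str, request: Request) -> dict:
    # Mirrors _assemble_headers / _append_openai_beta_header from the new class.
    headers = {
        "authorization": f"Bearer {api_key}",
        "api-key": api_key,
    }
    # Assumption: the assistants check keys off the request path; the handler
    # below delegates this to RouteChecks._is_assistants_api_request().
    if "/assistants" in request.url.path or "/threads" in request.url.path:
        headers["OpenAI-Beta"] = "assistants=v2"
    return headers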
@@ -452,44 +453,99 @@ async def openai_proxy_route(
     )


-async def _base_openai_pass_through_handler(
-    endpoint: str,
-    request: Request,
-    fastapi_response: Response,
-    user_api_key_dict: UserAPIKeyAuth,
-    base_target_url: str,
-    api_key: str,
-):
-    encoded_endpoint = httpx.URL(endpoint).path
-
-    # Ensure endpoint starts with '/' for proper URL construction
-    if not encoded_endpoint.startswith("/"):
-        encoded_endpoint = "/" + encoded_endpoint
-
-    # Construct the full target URL using httpx
-    base_url = httpx.URL(base_target_url)
-    updated_url = base_url.copy_with(path=encoded_endpoint)
-
-    ## check for streaming
-    is_streaming_request = False
-    if "stream" in str(updated_url):
-        is_streaming_request = True
-
-    ## CREATE PASS-THROUGH
-    endpoint_func = create_pass_through_route(
-        endpoint=endpoint,
-        target=str(updated_url),
-        custom_headers={
-            "authorization": "Bearer {}".format(api_key),
-            "api-key": "{}".format(api_key),
-        },
-    )  # dynamically construct pass-through endpoint based on incoming path
-    received_value = await endpoint_func(
-        request,
-        fastapi_response,
-        user_api_key_dict,
-        stream=is_streaming_request,  # type: ignore
-        query_params=dict(request.query_params),  # type: ignore
-    )
-
-    return received_value
+class BaseOpenAIPassThroughHandler:
+    @staticmethod
+    async def _base_openai_pass_through_handler(
+        endpoint: str,
+        request: Request,
+        fastapi_response: Response,
+        user_api_key_dict: UserAPIKeyAuth,
+        base_target_url: str,
+        api_key: str,
+    ):
+        encoded_endpoint = httpx.URL(endpoint).path
+
+        # Ensure endpoint starts with '/' for proper URL construction
+        if not encoded_endpoint.startswith("/"):
+            encoded_endpoint = "/" + encoded_endpoint
+
+        # Ensure base_target_url is properly formatted for OpenAI
+        base_target_url = (
+            BaseOpenAIPassThroughHandler._append_v1_to_openai_passthrough_url(
+                base_target_url
+            )
+        )
+
+        # Construct the full target URL by properly joining the base URL and endpoint path
+        base_url = httpx.URL(base_target_url)
+        updated_url = BaseOpenAIPassThroughHandler._join_url_paths(
+            base_url, encoded_endpoint
+        )
+
+        ## check for streaming
+        is_streaming_request = False
+        if "stream" in str(updated_url):
+            is_streaming_request = True
+
+        ## CREATE PASS-THROUGH
+        endpoint_func = create_pass_through_route(
+            endpoint=endpoint,
+            target=str(updated_url),
+            custom_headers=BaseOpenAIPassThroughHandler._assemble_headers(
+                api_key=api_key, request=request
+            ),
+        )  # dynamically construct pass-through endpoint based on incoming path
+        received_value = await endpoint_func(
+            request,
+            fastapi_response,
+            user_api_key_dict,
+            stream=is_streaming_request,  # type: ignore
+            query_params=dict(request.query_params),  # type: ignore
+        )
+
+        return received_value
+
+    @staticmethod
+    def _append_v1_to_openai_passthrough_url(base_url: str) -> str:
+        """
+        Appends the /v1 path to the OpenAI base URL if it's the OpenAI API URL
+        """
+        if base_url.rstrip("/") == "https://api.openai.com":
+            return "https://api.openai.com/v1"
+        return base_url
+
+    @staticmethod
+    def _append_openai_beta_header(headers: dict, request: Request) -> dict:
+        """
+        Appends the OpenAI-Beta header to the headers if the request is an OpenAI Assistants API request
+        """
+        if RouteChecks._is_assistants_api_request(request) is True:
+            headers["OpenAI-Beta"] = "assistants=v2"
+        return headers
+
+    @staticmethod
+    def _assemble_headers(api_key: str, request: Request) -> dict:
+        base_headers = {
+            "authorization": "Bearer {}".format(api_key),
+            "api-key": "{}".format(api_key),
+        }
+        return BaseOpenAIPassThroughHandler._append_openai_beta_header(
+            headers=base_headers,
+            request=request,
+        )
+
+    @staticmethod
+    def _join_url_paths(base_url: httpx.URL, path: str) -> httpx.URL:
+        """
+        Properly joins a base URL with a path, preserving any existing path in the base URL.
+        """
+        if not base_url.path or base_url.path == "/":
+            # If base URL has no path, just use the new path
+            return base_url.copy_with(path=path)
+
+        # Join paths correctly by removing trailing/leading slashes as needed
+        base_path = base_url.path.rstrip("/")
+        clean_path = path.lstrip("/")
+        full_path = f"{base_path}/{clean_path}"
+
+        return base_url.copy_with(path=full_path)
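The key behavioral change is in URL construction: the old handler's copy_with(path=...) replaced whatever path was already on the base URL, which broke targets such as https://api.openai.com/v1. A quick sketch contrasting the two approaches with httpx, restating _join_url_paths for illustration (the assistant ID is a placeholder):

import httpx

def join_url_paths(base_url: httpx.URL, path: str) -> httpx.URL:
    # Restates _join_url_paths: keep any existing path on the base URL.
    if not base_url.path or base_url.path == "/":
        return base_url.copy_with(path=path)
    return base_url.copy_with(path=f"{base_url.path.rstrip('/')}/{path.lstrip('/')}")

base = httpx.URL("https://api.openai.com/v1")

# Old behavior: copy_with() overwrites the existing "/v1" prefix.
print(str(base.copy_with(path="/assistants/asst_123")))
# https://api.openai.com/assistants/asst_123

# New behavior: the prefix is preserved when joining the endpoint path.
print(str(join_url_paths(base, "/assistants/asst_123")))
# https://api.openai.com/v1/assistants/asst_123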