Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
(feat) pass through llm endpoints - add PATCH support (vertex context caching requires it for update ops) (#6924)

* add PATCH for pass through endpoints
* test_pass_through_routes_support_all_methods
Parent: 4019ca38b4
Commit: 41aa604e6b
4 changed files with 45 additions and 8 deletions
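Why the new verb matters: updating a Vertex AI context cache (for example, extending its TTL) is issued as an HTTP PATCH against the cachedContents resource, so the pass-through routes must forward PATCH as well. Below is a hypothetical client sketch, not part of this commit; the proxy base URL, API key, cache resource name, updateMask parameter, and ttl field are all illustrative assumptions.

import httpx

# Assumed LiteLLM proxy base and pass-through prefix (matches the route in the diff below).
PROXY_BASE = "http://localhost:4000/vertex-ai"
# Placeholder Vertex cached-content resource name.
CACHE_NAME = "projects/my-project/locations/us-central1/cachedContents/my-cache-id"

resp = httpx.request(
    "PATCH",
    f"{PROXY_BASE}/v1/{CACHE_NAME}",
    params={"updateMask": "ttl"},                 # assumed update-mask query parameter
    json={"ttl": "7200s"},                        # assumed field for extending the cache TTL
    headers={"Authorization": "Bearer sk-1234"},  # placeholder proxy key
)
print(resp.status_code, resp.text)

Before this change, a request like the one above would have been rejected by the proxy because PATCH was not in the route's allowed methods.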
@@ -143,12 +143,14 @@ def construct_target_url(
 @router.api_route(
     "/vertex-ai/{endpoint:path}",
-    methods=["GET", "POST", "PUT", "DELETE"],
+    methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
     tags=["Vertex AI Pass-through", "pass-through"],
     include_in_schema=False,
 )
 @router.api_route(
-    "/vertex_ai/{endpoint:path}", methods=["GET", "POST", "PUT", "DELETE"], tags=["Vertex AI Pass-through", "pass-through"]
+    "/vertex_ai/{endpoint:path}",
+    methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
+    tags=["Vertex AI Pass-through", "pass-through"],
 )
 async def vertex_proxy_route(
     endpoint: str,
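The commit message also names test_pass_through_routes_support_all_methods. The following is a self-contained sketch of what such a check could look like, not the repository's actual test: it registers the same two routes shown in the diff on a fresh APIRouter and asserts that every registered pass-through route accepts all five verbs, including the newly added PATCH.

from fastapi import APIRouter, Request
from fastapi.routing import APIRoute

router = APIRouter()

EXPECTED_METHODS = {"GET", "POST", "PUT", "DELETE", "PATCH"}


@router.api_route(
    "/vertex-ai/{endpoint:path}",
    methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
    tags=["Vertex AI Pass-through", "pass-through"],
    include_in_schema=False,
)
@router.api_route(
    "/vertex_ai/{endpoint:path}",
    methods=["GET", "POST", "PUT", "DELETE", "PATCH"],
    tags=["Vertex AI Pass-through", "pass-through"],
)
async def vertex_proxy_route(endpoint: str, request: Request):
    # Stub body for the sketch; the real handler forwards the request to Vertex AI.
    return {"endpoint": endpoint, "method": request.method}


def test_pass_through_routes_support_all_methods():
    # Every registered pass-through route should accept all expected HTTP verbs.
    for route in router.routes:
        if isinstance(route, APIRoute):
            assert EXPECTED_METHODS.issubset(route.methods), route.path


if __name__ == "__main__":
    # Normally collected by pytest; callable directly for a quick standalone check.
    test_pass_through_routes_support_all_methods()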