mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-24 18:24:20 +00:00
fix(llm_passthrough_endpoints.py): initialize vertex client within route
mitigate credential caching issue
This commit is contained in:
parent
ef6bf02ac4
commit
21de86e20e
1 changed file with 2 additions and 0 deletions
|
@ -448,6 +448,8 @@ async def vertex_proxy_route(
|
|||
get_vertex_project_id_from_url,
|
||||
)
|
||||
|
||||
vertex_llm_base = VertexBase()
|
||||
|
||||
encoded_endpoint = httpx.URL(endpoint).path
|
||||
verbose_proxy_logger.debug("requested endpoint %s", endpoint)
|
||||
headers: dict = {}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue