diff --git a/litellm/llms/vertex_httpx.py b/litellm/llms/vertex_httpx.py
index 94fbd0a13e..c784bf2e89 100644
--- a/litellm/llms/vertex_httpx.py
+++ b/litellm/llms/vertex_httpx.py
@@ -1586,6 +1586,8 @@ class VertexLLM(BaseLLM):
 
         if "instances" in optional_params:
             request_data["instances"] = optional_params["instances"]
+        elif isinstance(input, list):
+            request_data["instances"] = input
         else:
             # construct instances
             vertex_request_instance = Instance(**optional_params)
diff --git a/litellm/tests/test_amazing_vertex_completion.py b/litellm/tests/test_amazing_vertex_completion.py
index b7fc332417..75868a2f42 100644
--- a/litellm/tests/test_amazing_vertex_completion.py
+++ b/litellm/tests/test_amazing_vertex_completion.py
@@ -1836,7 +1836,7 @@ async def test_vertexai_multimodal_embedding():
     litellm.set_verbose = True
     response = await litellm.aembedding(
         model="vertex_ai/multimodalembedding@001",
-        instances=[
+        input=[
            {
                "image": {
                    "gcsUri": "gs://cloud-samples-data/vertex-ai/llm/prompts/landmark1.png"
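
With this change, a list passed as `input` is forwarded directly as the Vertex `instances` payload, so multimodal embedding requests can use the standard `input` parameter. A minimal usage sketch based on the updated test (assumes Vertex AI credentials are configured and the public `gs://` sample asset is reachable):

```python
import asyncio

import litellm


async def main():
    # After this change, the list passed as `input` becomes the Vertex
    # "instances" payload for multimodalembedding@001.
    response = await litellm.aembedding(
        model="vertex_ai/multimodalembedding@001",
        input=[
            {
                "image": {
                    "gcsUri": "gs://cloud-samples-data/vertex-ai/llm/prompts/landmark1.png"
                }
            }
        ],
    )
    print(response)


asyncio.run(main())
```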