(Feat - Batches API) add support for retrieving vertex api batch jobs (#7661)

* add _async_retrieve_batch

* fix aretrieve_batch

* fix _get_batch_id_from_vertex_ai_batch_response

* fix batches docs
Committed by Ishaan Jaff on 2025-01-09 18:35:03 -08:00 (committed via GitHub)
parent 2507c275f6
commit 13f364682d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 177 additions and 2 deletions

View file

@@ -416,6 +416,32 @@ def retrieve_batch(
max_retries=optional_params.max_retries,
retrieve_batch_data=_retrieve_batch_request,
)
elif custom_llm_provider == "vertex_ai":
api_base = optional_params.api_base or ""
vertex_ai_project = (
optional_params.vertex_project
or litellm.vertex_project
or get_secret_str("VERTEXAI_PROJECT")
)
vertex_ai_location = (
optional_params.vertex_location
or litellm.vertex_location
or get_secret_str("VERTEXAI_LOCATION")
)
vertex_credentials = optional_params.vertex_credentials or get_secret_str(
"VERTEXAI_CREDENTIALS"
)
response = vertex_ai_batches_instance.retrieve_batch(
_is_async=_is_async,
batch_id=batch_id,
api_base=api_base,
vertex_project=vertex_ai_project,
vertex_location=vertex_ai_location,
vertex_credentials=vertex_credentials,
timeout=timeout,
max_retries=optional_params.max_retries,
)
else:
raise litellm.exceptions.BadRequestError(
message="LiteLLM doesn't support {} for 'create_batch'. Only 'openai' is supported.".format(