Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 03:34:10 +00:00.
(feat) add Vertex Batches API support in OpenAI format (#7032)
* working request
* working transform
* working request
* transform vertex batch response
* add _async_create_batch
* move gcs functions to base
* fix _get_content_from_openai_file
* transform_openai_file_content_to_vertex_ai_file_content
* fix transform vertex gcs bucket upload to OAI files format
* working e2e test
* _get_gcs_object_name
* fix linting
* add doc string
* fix transform_gcs_bucket_response_to_openai_file_object
* use vertex for batch endpoints
* add batches support for vertex
* test_vertex_batches_endpoint
* test_vertex_batch_prediction
* fix gcs bucket base auth
* docs clean up batches
* docs Batch API
* docs vertex batches api
* test_get_gcs_logging_config_without_service_account
* undo change
* fix vertex md
* test_get_gcs_logging_config_without_service_account
* ci/cd run again
This commit is contained in:
parent
dd5ccdd889
commit
0eef9df396
20 changed files with 1347 additions and 424 deletions
|
@ -11,8 +11,7 @@ from dotenv import load_dotenv
|
|||
load_dotenv()
|
||||
sys.path.insert(
|
||||
0, os.path.abspath("../..")
|
||||
) # Adds the parent directory to the system path
|
||||
import asyncio
|
||||
) # Adds the parent directory to the system-path
|
||||
import logging
|
||||
import time
|
||||
|
||||
|
@ -20,6 +19,10 @@ import pytest
|
|||
|
||||
import litellm
|
||||
from litellm import create_batch, create_file
|
||||
from litellm._logging import verbose_logger
|
||||
from test_gcs_bucket import load_vertex_ai_credentials
|
||||
|
||||
verbose_logger.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("provider", ["openai"]) # , "azure"
|
||||
|
@ -206,3 +209,32 @@ def test_cancel_batch():
|
|||
|
||||
def test_list_batch():
    """Placeholder for a list-batches test; not yet implemented."""
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_vertex_batch_prediction():
    """End-to-end Vertex AI Batch API test in OpenAI format.

    Uploads a JSONL batch-completions file to Vertex AI via
    ``litellm.acreate_file``, then creates a batch prediction job for it
    via ``litellm.acreate_batch``. Asserts that the file upload returned
    a non-null file id; the batch-creation response is printed for
    inspection.

    Requires valid Vertex AI credentials (loaded via
    ``load_vertex_ai_credentials``) and network access.
    """
    load_vertex_ai_credentials()
    file_name = "vertex_batch_completions.jsonl"
    _current_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = os.path.join(_current_dir, file_name)
    # Use a context manager so the file handle is closed even if the
    # upload raises (the original left the handle open).
    with open(file_path, "rb") as f:
        file_obj = await litellm.acreate_file(
            file=f,
            purpose="batch",
            custom_llm_provider="vertex_ai",
        )
    print("Response from creating file=", file_obj)

    batch_input_file_id = file_obj.id
    assert (
        batch_input_file_id is not None
    ), f"Failed to create file, expected a non null file_id but got {batch_input_file_id}"

    create_batch_response = await litellm.acreate_batch(
        completion_window="24h",
        endpoint="/v1/chat/completions",
        input_file_id=batch_input_file_id,
        custom_llm_provider="vertex_ai",
        metadata={"key1": "value1", "key2": "value2"},
    )
    print("create_batch_response=", create_batch_response)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue