(feat) Add basic logging support for /batches endpoints (#7381)

* add basic logging for `create_batch`

* add create_batch as a call type

* add basic dd logging for batches

* basic batch creation logging on DD
This commit is contained in:
Ishaan Jaff 2024-12-23 17:45:03 -08:00 committed by GitHub
parent 6f6c651ee0
commit 87f19d6f13
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 81 additions and 43 deletions

View file

@ -16,7 +16,7 @@ import logging
import time
import pytest
from typing import Optional
import litellm
from litellm import create_batch, create_file
from litellm._logging import verbose_logger
@ -24,14 +24,36 @@ from test_gcs_bucket import load_vertex_ai_credentials
verbose_logger.setLevel(logging.DEBUG)
from litellm.integrations.custom_logger import CustomLogger
from litellm.types.utils import StandardLoggingPayload
class TestCustomLogger(CustomLogger):
    """Test helper that captures the standard logging payload from success events.

    Used by the batch tests to assert that creating a batch emits a
    ``standard_logging_object`` through the custom-logger callback path.
    """

    def __init__(self):
        super().__init__()
        # Remains None until async_log_success_event fires for a successful call.
        self.standard_logging_object: Optional[StandardLoggingPayload] = None

    async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
        """Record the standard logging payload emitted on a successful call."""
        print("Success event logged with kwargs=", kwargs, "and response_obj=", response_obj)
        self.standard_logging_object = kwargs["standard_logging_object"]
@pytest.mark.parametrize("provider", ["openai"]) # , "azure"
def test_create_batch(provider):
@pytest.mark.asyncio
async def test_create_batch(provider):
"""
1. Create File for Batch completion
2. Create Batch Request
3. Retrieve the specific batch
"""
custom_logger = TestCustomLogger()
litellm.callbacks = [custom_logger, "datadog"]
if provider == "azure":
# Don't have anymore Azure Quota
return
@ -39,7 +61,7 @@ def test_create_batch(provider):
_current_dir = os.path.dirname(os.path.abspath(__file__))
file_path = os.path.join(_current_dir, file_name)
file_obj = litellm.create_file(
file_obj = await litellm.acreate_file(
file=open(file_path, "rb"),
purpose="batch",
custom_llm_provider=provider,
@ -51,8 +73,8 @@ def test_create_batch(provider):
batch_input_file_id is not None
), "Failed to create file, expected a non null file_id but got {batch_input_file_id}"
time.sleep(5)
create_batch_response = litellm.create_batch(
await asyncio.sleep(1)
create_batch_response = await litellm.acreate_batch(
completion_window="24h",
endpoint="/v1/chat/completions",
input_file_id=batch_input_file_id,
@ -61,7 +83,14 @@ def test_create_batch(provider):
)
print("response from litellm.create_batch=", create_batch_response)
await asyncio.sleep(6)
# Assert that the create batch event is logged on CustomLogger
assert custom_logger.standard_logging_object is not None
print(
"standard_logging_object=",
json.dumps(custom_logger.standard_logging_object, indent=4),
)
assert (
create_batch_response.id is not None
), f"Failed to create batch, expected a non null batch_id but got {create_batch_response.id}"
@ -73,7 +102,7 @@ def test_create_batch(provider):
create_batch_response.input_file_id == batch_input_file_id
), f"Failed to create batch, expected input_file_id to be {batch_input_file_id} but got {create_batch_response.input_file_id}"
retrieved_batch = litellm.retrieve_batch(
retrieved_batch = await litellm.aretrieve_batch(
batch_id=create_batch_response.id, custom_llm_provider=provider
)
print("retrieved batch=", retrieved_batch)
@ -82,10 +111,10 @@ def test_create_batch(provider):
assert retrieved_batch.id == create_batch_response.id
# list all batches
list_batches = litellm.list_batches(custom_llm_provider=provider, limit=2)
list_batches = await litellm.alist_batches(custom_llm_provider=provider, limit=2)
print("list_batches=", list_batches)
file_content = litellm.file_content(
file_content = await litellm.afile_content(
file_id=batch_input_file_id, custom_llm_provider=provider
)