(feat) /batches - track user_api_key_alias, user_api_key_team_alias etc for /batch requests (#7401)

* run azure testing on ci/cd

* update docs on azure batches endpoints

* add input azure.jsonl

* refactor - use separate file for batches endpoints

* fixes for passing custom llm provider to /batch endpoints

* pass custom llm provider to files endpoints

* update azure batches doc

* add info for azure batches api

* update batches endpoints

* use simple helper for raising proxy exception

* update config.yml

* fix imports

* add type hints to get_litellm_params

* update get_litellm_params

* update get_litellm_params

* update get slp

* QOL - stop double logging create batch operations on custom loggers

* reuse the standard logging payload (slp) from the original event

* _create_standard_logging_object_for_completed_batch

* fix linting errors

* reduce num changes in PR

* update BATCH_STATUS_POLL_MAX_ATTEMPTS
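
With this change, callers can route /files and /batch requests to a specific provider via custom_llm_provider and attach key/team aliases through the litellm-specific litellm_metadata param, which is what the test diff below exercises. A minimal caller-side sketch, assuming a local azure.jsonl input file and Azure credentials already configured in the environment (the env-var setup and file handling here are illustrative, not part of this commit):

# Sketch only: mirrors the call shape exercised by the test diff below.
# Assumes azure.jsonl exists locally and Azure OpenAI credentials are set
# via the usual environment variables (e.g. AZURE_API_KEY / AZURE_API_BASE).
import asyncio
import litellm


async def main():
    # Upload the batch input file, routing to Azure via custom_llm_provider
    file_obj = await litellm.acreate_file(
        file=open("azure.jsonl", "rb"),
        purpose="batch",
        custom_llm_provider="azure",
    )

    # Create the batch; litellm_metadata is the litellm-specific param that
    # carries user_api_key_alias / user_api_key_team_alias to logging callbacks
    batch = await litellm.acreate_batch(
        completion_window="24h",
        endpoint="/v1/chat/completions",
        input_file_id=file_obj.id,
        custom_llm_provider="azure",
        metadata={"key1": "value1"},
        litellm_metadata={
            "user_api_key_alias": "special_api_key_alias",
            "user_api_key_team_alias": "special_team_alias",
        },
    )
    print("created batch:", batch.id)


asyncio.run(main())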
Ishaan Jaff 2024-12-24 17:44:28 -08:00 committed by GitHub
parent 0627450808
commit e98f1d16fd
9 changed files with 72 additions and 29 deletions
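
The test diff below asserts that these aliases surface on the standard logging object handed to custom loggers. A minimal sketch of a logger that reads them, assuming the payload arrives as kwargs["standard_logging_object"] in CustomLogger's async success hook (the class name and that kwargs key are assumptions for illustration, not taken from this commit's diff):

# Sketch only: a custom logger that surfaces the key/team aliases logged
# for batch operations. Assumes litellm passes the standard logging payload
# as kwargs["standard_logging_object"] to async_log_success_event.
import litellm
from litellm.integrations.custom_logger import CustomLogger


class BatchAliasLogger(CustomLogger):
    async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
        slp = kwargs.get("standard_logging_object") or {}
        metadata = slp.get("metadata", {})
        print(
            "batch logged for key alias:",
            metadata.get("user_api_key_alias"),
            "team alias:",
            metadata.get("user_api_key_team_alias"),
        )


# Register the logger so create-batch events flow through it
litellm.callbacks = [BatchAliasLogger()]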


@@ -12,6 +12,7 @@ load_dotenv()
sys.path.insert(
0, os.path.abspath("../..")
) # Adds the parent directory to the system-path
import logging
import time
@@ -191,12 +192,18 @@ async def test_async_create_batch(provider):
batch_input_file_id is not None
), "Failed to create file, expected a non null file_id but got {batch_input_file_id}"
extra_metadata_field = {
"user_api_key_alias": "special_api_key_alias",
"user_api_key_team_alias": "special_team_alias",
}
create_batch_response = await litellm.acreate_batch(
completion_window="24h",
endpoint="/v1/chat/completions",
input_file_id=batch_input_file_id,
custom_llm_provider=provider,
metadata={"key1": "value1", "key2": "value2"},
# litellm specific param - used for logging metadata on logging callback
litellm_metadata=extra_metadata_field,
)
print("response from litellm.create_batch=", create_batch_response)
@@ -215,6 +222,18 @@ async def test_async_create_batch(provider):
await asyncio.sleep(6)
# Assert that the create batch event is logged on CustomLogger
assert custom_logger.standard_logging_object is not None
print(
"standard_logging_object=",
json.dumps(custom_logger.standard_logging_object, indent=4, default=str),
)
assert (
custom_logger.standard_logging_object["metadata"]["user_api_key_alias"]
== extra_metadata_field["user_api_key_alias"]
)
assert (
custom_logger.standard_logging_object["metadata"]["user_api_key_team_alias"]
== extra_metadata_field["user_api_key_team_alias"]
)
retrieved_batch = await litellm.aretrieve_batch(
batch_id=create_batch_response.id, custom_llm_provider=provider