Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
(Feat) add `/v1/batches/{batch_id:path}/cancel` endpoint (#7406)
* use 1 file for azure batches handling
* add cancel_batch endpoint
* add a cancel batch on open ai
* add cancel_batch endpoint
* add cancel batches to test
* remove unused imports
* test_batches_operations
* update test_batches_operations
This commit is contained in: parent 3ac54483a7, commit 81be0b4090
7 changed files with 589 additions and 304 deletions
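For context, the route added here mirrors the upstream OpenAI batch-cancel endpoint, so an OpenAI SDK client pointed at a LiteLLM proxy should be able to cancel a batch through it. A minimal sketch, assuming a proxy listening on localhost:4000 with a virtual key and a hypothetical batch id (none of these values come from the commit itself):

# Sketch only: base_url, api_key, and the batch id below are assumptions.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:4000/v1",  # LiteLLM proxy (assumed address)
    api_key="sk-1234",                    # proxy virtual key (placeholder)
)

# POSTs to /v1/batches/{batch_id}/cancel on the proxy
cancelled = client.batches.cancel("batch_abc123")
print(cancelled.status)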
@@ -1799,6 +1799,15 @@ class OpenAIBatchesAPI(BaseLLM):
        response = openai_client.batches.retrieve(**retrieve_batch_data)
        return response

    async def acancel_batch(
        self,
        cancel_batch_data: CancelBatchRequest,
        openai_client: AsyncOpenAI,
    ) -> Batch:
        verbose_logger.debug("async cancelling batch, args= %s", cancel_batch_data)
        response = await openai_client.batches.cancel(**cancel_batch_data)
        return response

    def cancel_batch(
        self,
        _is_async: bool,
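The new acancel_batch simply unpacks cancel_batch_data into openai_client.batches.cancel(...). A minimal standalone sketch of that same call, assuming OPENAI_API_KEY is set and using a hypothetical batch id (the keys of cancel_batch_data must be valid keyword arguments of batches.cancel, e.g. batch_id):

import asyncio
from openai import AsyncOpenAI

async def main() -> None:
    openai_client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
    cancel_batch_data = {"batch_id": "batch_abc123"}  # hypothetical id
    # same call the handler makes: unpack the request dict into batches.cancel
    response = await openai_client.batches.cancel(**cancel_batch_data)
    print(response.status)  # typically "cancelling" or "cancelled"

asyncio.run(main())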
@@ -1823,6 +1832,16 @@ class OpenAIBatchesAPI(BaseLLM):
            raise ValueError(
                "OpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
            )

        if _is_async is True:
            if not isinstance(openai_client, AsyncOpenAI):
                raise ValueError(
                    "OpenAI client is not an instance of AsyncOpenAI. Make sure you passed an AsyncOpenAI client."
                )
            return self.acancel_batch(  # type: ignore
                cancel_batch_data=cancel_batch_data, openai_client=openai_client
            )

        response = openai_client.batches.cancel(**cancel_batch_data)
        return response
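When _is_async is True, cancel_batch hands back the coroutine from acancel_batch unawaited (hence the # type: ignore), leaving it to a higher-level async caller to await. A minimal standalone sketch of that dispatch pattern, with illustrative names and a stub cancel call rather than litellm's actual signature:

import asyncio
from typing import Any, Coroutine, Union

async def _acancel(batch_id: str) -> dict:
    # stand-in for the real async client call
    return {"id": batch_id, "status": "cancelling"}

def cancel(batch_id: str, _is_async: bool) -> Union[dict, Coroutine[Any, Any, dict]]:
    if _is_async:
        # return the coroutine without awaiting; the async caller awaits it
        return _acancel(batch_id)
    # a purely synchronous path would use a blocking client here
    return {"id": batch_id, "status": "cancelling"}

async def main() -> None:
    result = await cancel("batch_abc123", _is_async=True)  # hypothetical id
    print(result["status"])

asyncio.run(main())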