forked from phoenix/litellm-mirror
add verbose_logger.debug to retrieve batch
parent f627fa9b40
commit 2541d5f625
1 changed file with 2 additions and 0 deletions
@@ -24,6 +24,7 @@ from pydantic import BaseModel
 from typing_extensions import overload, override

 import litellm
+from litellm._logging import verbose_logger
 from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.types.utils import ProviderField
 from litellm.utils import (
@@ -2534,6 +2535,7 @@ class OpenAIBatchesAPI(BaseLLM):
         retrieve_batch_data: RetrieveBatchRequest,
         openai_client: AsyncOpenAI,
     ) -> Batch:
+        verbose_logger.debug("retrieving batch, args= %s", retrieve_batch_data)
         response = await openai_client.batches.retrieve(**retrieve_batch_data)
         return response

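The added line logs the retrieve-batch request arguments through litellm's verbose_logger, which is a plain Python logging.Logger imported from litellm._logging. A minimal sketch of how a caller could surface the new debug output, assuming only the standard logging module (the handler and format below are illustrative and not part of this commit):

import logging

from litellm._logging import verbose_logger

# Illustrative setup: raise the logger to DEBUG and attach a basic handler so
# the new "retrieving batch, args= %s" line is emitted when a batch is retrieved.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(name)s %(levelname)s %(message)s",
)
verbose_logger.setLevel(logging.DEBUG)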