mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 19:54:13 +00:00
add all openai file endpoints
This commit is contained in:
parent
e83d0314ce
commit
b4138cf9e5
1 changed files with 100 additions and 0 deletions
|
@ -18,6 +18,7 @@ import httpx
|
||||||
import openai
|
import openai
|
||||||
from openai import AsyncOpenAI, OpenAI
|
from openai import AsyncOpenAI, OpenAI
|
||||||
from openai.types.beta.assistant_deleted import AssistantDeleted
|
from openai.types.beta.assistant_deleted import AssistantDeleted
|
||||||
|
from openai.types.file_deleted import FileDeleted
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from typing_extensions import overload, override
|
from typing_extensions import overload, override
|
||||||
|
|
||||||
|
@ -2110,6 +2111,105 @@ class OpenAIFilesAPI(BaseLLM):
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
async def adelete_file(
    self,
    file_id: str,
    openai_client: AsyncOpenAI,
) -> FileDeleted:
    """Delete a file from OpenAI by id using an async client.

    Args:
        file_id: Identifier of the file to delete.
        openai_client: An initialized ``AsyncOpenAI`` client.

    Returns:
        The ``FileDeleted`` confirmation object from the OpenAI API.
    """
    # Delegate straight to the SDK; no post-processing is needed.
    return await openai_client.files.delete(file_id=file_id)
|
||||||
|
|
||||||
|
def delete_file(
    self,
    _is_async: bool,
    file_id: str,
    api_base: str,
    api_key: Optional[str],
    timeout: Union[float, httpx.Timeout],
    max_retries: Optional[int],
    organization: Optional[str],
    client: Optional[Union[OpenAI, AsyncOpenAI]] = None,
):
    """Delete a file from OpenAI, in sync or async mode.

    Args:
        _is_async: When True, returns the coroutine from ``adelete_file``
            instead of executing the deletion synchronously.
        file_id: Identifier of the file to delete.
        api_base: Base URL for the OpenAI-compatible endpoint.
        api_key: API key; may be None if set via environment.
        timeout: Request timeout (seconds or ``httpx.Timeout``).
        max_retries: Max retry count for the underlying client.
        organization: Optional OpenAI organization id.
        client: Optional pre-built client to reuse.

    Returns:
        A ``FileDeleted`` object (sync), or a coroutine resolving to one (async).

    Raises:
        ValueError: If no client could be initialized, or if async mode was
            requested but the supplied client is not an ``AsyncOpenAI``.
    """
    initialized_client: Optional[Union[OpenAI, AsyncOpenAI]] = self.get_openai_client(
        api_key=api_key,
        api_base=api_base,
        timeout=timeout,
        max_retries=max_retries,
        organization=organization,
        client=client,
        _is_async=_is_async,
    )
    # Guard: a missing key/environment leaves the client uninitialized.
    if initialized_client is None:
        raise ValueError(
            "OpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
        )

    if _is_async is True:
        # Async path must have an async client; fail loudly otherwise.
        if not isinstance(initialized_client, AsyncOpenAI):
            raise ValueError(
                "OpenAI client is not an instance of AsyncOpenAI. Make sure you passed an AsyncOpenAI client."
            )
        # Returns a coroutine for the caller to await.
        return self.adelete_file(  # type: ignore
            file_id=file_id,
            openai_client=initialized_client,
        )

    # Synchronous path.
    return initialized_client.files.delete(file_id=file_id)
|
||||||
|
|
||||||
|
async def alist_files(
    self,
    openai_client: AsyncOpenAI,
    purpose: Optional[str] = None,
):
    """List files via an async OpenAI client, optionally filtered by purpose.

    Args:
        openai_client: An initialized ``AsyncOpenAI`` client.
        purpose: If a string, only files with this purpose are listed;
            otherwise all files are returned.

    Returns:
        The paginated file list object from the OpenAI API.
    """
    # Only forward `purpose` when it is actually a string — the SDK does
    # not accept None for that parameter.
    if not isinstance(purpose, str):
        return await openai_client.files.list()
    return await openai_client.files.list(purpose=purpose)
|
||||||
|
|
||||||
|
def list_files(
    self,
    _is_async: bool,
    api_base: str,
    api_key: Optional[str],
    timeout: Union[float, httpx.Timeout],
    max_retries: Optional[int],
    organization: Optional[str],
    purpose: Optional[str] = None,
    client: Optional[Union[OpenAI, AsyncOpenAI]] = None,
):
    """List uploaded files, in sync or async mode.

    Args:
        _is_async: When True, returns the coroutine from ``alist_files``
            instead of executing the listing synchronously.
        api_base: Base URL for the OpenAI-compatible endpoint.
        api_key: API key; may be None if set via environment.
        timeout: Request timeout (seconds or ``httpx.Timeout``).
        max_retries: Max retry count for the underlying client.
        organization: Optional OpenAI organization id.
        purpose: Optional purpose filter passed through to the API.
        client: Optional pre-built client to reuse.

    Returns:
        The file list response (sync), or a coroutine resolving to it (async).

    Raises:
        ValueError: If no client could be initialized, or if async mode was
            requested but the supplied client is not an ``AsyncOpenAI``.
    """
    initialized_client: Optional[Union[OpenAI, AsyncOpenAI]] = self.get_openai_client(
        api_key=api_key,
        api_base=api_base,
        timeout=timeout,
        max_retries=max_retries,
        organization=organization,
        client=client,
        _is_async=_is_async,
    )
    # Guard: a missing key/environment leaves the client uninitialized.
    if initialized_client is None:
        raise ValueError(
            "OpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
        )

    if _is_async is True:
        # Async path must have an async client; fail loudly otherwise.
        if not isinstance(initialized_client, AsyncOpenAI):
            raise ValueError(
                "OpenAI client is not an instance of AsyncOpenAI. Make sure you passed an AsyncOpenAI client."
            )
        # Returns a coroutine for the caller to await.
        return self.alist_files(  # type: ignore
            purpose=purpose,
            openai_client=initialized_client,
        )

    # Synchronous path: forward `purpose` only when it is a string,
    # since the SDK does not accept None for that parameter.
    if not isinstance(purpose, str):
        return initialized_client.files.list()
    return initialized_client.files.list(purpose=purpose)
|
||||||
|
|
||||||
|
|
||||||
class OpenAIBatchesAPI(BaseLLM):
|
class OpenAIBatchesAPI(BaseLLM):
|
||||||
"""
|
"""
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue