fix: fix linting errors

Krrish Dholakia 2024-08-22 15:51:59 -07:00
parent 8625663458
commit 63cd94c32a
4 changed files with 16 additions and 16 deletions

View file

@@ -46,7 +46,7 @@ async def acreate_batch(
     completion_window: Literal["24h"],
     endpoint: Literal["/v1/chat/completions", "/v1/embeddings", "/v1/completions"],
     input_file_id: str,
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     metadata: Optional[Dict[str, str]] = None,
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
@@ -215,7 +215,7 @@ def create_batch(
 async def aretrieve_batch(
     batch_id: str,
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     metadata: Optional[Dict[str, str]] = None,
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
@@ -379,7 +379,7 @@ def retrieve_batch(
 async def alist_batches(
     after: Optional[str] = None,
     limit: Optional[int] = None,
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     metadata: Optional[Dict[str, str]] = None,
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,

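The practical effect of the tighter annotation can be reproduced with a small, self-contained sketch (the function body and the call values below are placeholders, not litellm's actual implementation): once custom_llm_provider is typed as Literal["openai", "azure"], mypy rejects any other provider string at the call site.

from typing import Dict, Literal, Optional


async def acreate_batch(
    completion_window: Literal["24h"],
    endpoint: Literal["/v1/chat/completions", "/v1/embeddings", "/v1/completions"],
    input_file_id: str,
    custom_llm_provider: Literal["openai", "azure"] = "openai",
    metadata: Optional[Dict[str, str]] = None,
) -> None:
    ...  # body elided; only the signature matters for type checking


async def _demo() -> None:
    # Accepted: "azure" is one of the allowed Literal values.
    await acreate_batch(
        completion_window="24h",
        endpoint="/v1/chat/completions",
        input_file_id="file-abc123",  # placeholder id
        custom_llm_provider="azure",
    )
    # Rejected by mypy (roughly): incompatible type "Literal['vertex_ai']";
    # expected "Literal['openai', 'azure']"
    # await acreate_batch(..., custom_llm_provider="vertex_ai")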
View file

@@ -35,7 +35,7 @@ azure_files_instance = AzureOpenAIFilesAPI()
 async def afile_retrieve(
     file_id: str,
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
     **kwargs,
@@ -189,7 +189,7 @@ def file_retrieve(
 # Delete file
 async def afile_delete(
     file_id: str,
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
     **kwargs,
@@ -339,7 +339,7 @@ def file_delete(
 # List files
 async def afile_list(
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     purpose: Optional[str] = None,
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
@@ -493,7 +493,7 @@ def file_list(
 async def acreate_file(
     file: FileTypes,
     purpose: Literal["assistants", "batch", "fine-tune"],
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
     **kwargs,
@@ -654,7 +654,7 @@ def create_file(
 async def afile_content(
     file_id: str,
-    custom_llm_provider: str = "openai",
+    custom_llm_provider: Literal["openai", "azure"] = "openai",
     extra_headers: Optional[Dict[str, str]] = None,
     extra_body: Optional[Dict[str, str]] = None,
     **kwargs,

View file

@@ -150,7 +150,7 @@ async def create_file(
     # for now use custom_llm_provider=="openai" -> this will change as LiteLLM adds more providers for acreate_batch
     response = await litellm.acreate_file(
-        **_create_file_request, custom_llm_provider=custom_llm_provider
+        **_create_file_request, custom_llm_provider=custom_llm_provider # type: ignore
     )
     ### ALERTING ###
@@ -265,7 +265,7 @@ async def get_file(
     if provider is None: # default to openai
         provider = "openai"
     response = await litellm.afile_retrieve(
-        custom_llm_provider=provider, file_id=file_id, **data
+        custom_llm_provider=provider, file_id=file_id, **data # type: ignore
     )
     ### ALERTING ###
@@ -381,7 +381,7 @@ async def delete_file(
     if provider is None: # default to openai
         provider = "openai"
     response = await litellm.afile_delete(
-        custom_llm_provider=provider, file_id=file_id, **data
+        custom_llm_provider=provider, file_id=file_id, **data # type: ignore
     )
     ### ALERTING ###
@@ -496,7 +496,7 @@ async def list_files(
     if provider is None:
         provider = "openai"
     response = await litellm.afile_list(
-        custom_llm_provider=provider, purpose=purpose, **data
+        custom_llm_provider=provider, purpose=purpose, **data # type: ignore
     )
     ### ALERTING ###
@@ -611,7 +611,7 @@ async def get_file_content(
     if provider is None:
         provider = "openai"
     response = await litellm.afile_content(
-        custom_llm_provider=provider, file_id=file_id, **data
+        custom_llm_provider=provider, file_id=file_id, **data # type: ignore
     )
     ### ALERTING ###

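The # type: ignore comments are needed because provider reaches these calls as a plain str (it is defaulted or parsed at runtime), and mypy will not accept a str where Literal["openai", "azure"] is expected. A minimal sketch of the mismatch, using a stand-in function rather than the real litellm API:

from typing import Literal, Optional


def afile_retrieve_stub(
    file_id: str,
    custom_llm_provider: Literal["openai", "azure"] = "openai",
) -> None:
    ...  # stand-in with the same parameter typing shown in the diff above


provider: Optional[str] = None  # e.g. taken from the incoming request
if provider is None:  # default to openai
    provider = "openai"

# Without the ignore, mypy reports (roughly): Argument "custom_llm_provider"
# has incompatible type "str"; expected "Literal['openai', 'azure']".
# The "# type: ignore" comments in this commit silence exactly this class of error.
afile_retrieve_stub(custom_llm_provider=provider, file_id="file-abc123")  # type: ignore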
View file

@@ -4945,7 +4945,7 @@ async def create_batch(
     if provider is None:
         provider = "openai"
     response = await litellm.acreate_batch(
-        custom_llm_provider=provider, **_create_batch_data
+        custom_llm_provider=provider, **_create_batch_data # type: ignore
     )
     ### ALERTING ###
@@ -5048,7 +5048,7 @@ async def retrieve_batch(
     if provider is None:
         provider = "openai"
     response = await litellm.aretrieve_batch(
-        custom_llm_provider=provider, **_retrieve_batch_request
+        custom_llm_provider=provider, **_retrieve_batch_request # type: ignore
     )
     ### ALERTING ###
@@ -5146,7 +5146,7 @@ async def list_batches(
     if provider is None:
         provider = "openai"
     response = await litellm.alist_batches(
-        custom_llm_provider=provider,
+        custom_llm_provider=provider, # type: ignore
         after=after,
         limit=limit,
     )
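
As a design note, an alternative to repeating # type: ignore at each call site (not what this commit does) would be to validate the provider string once and cast it, keeping the calls type-clean. A hypothetical helper sketch:

from typing import Literal, cast, get_args

LlmProvider = Literal["openai", "azure"]


def as_llm_provider(value: str) -> LlmProvider:
    # Runtime guard so the cast below stays honest.
    if value not in get_args(LlmProvider):
        raise ValueError(f"unsupported custom_llm_provider: {value}")
    return cast(LlmProvider, value)


# e.g. response = await litellm.acreate_batch(
#     custom_llm_provider=as_llm_provider(provider), **_create_batch_data
# )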