Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
fix linting
parent 616233089e
commit e59376d6fc

2 changed files with 6 additions and 17 deletions
@@ -183,7 +183,9 @@ def create_fine_tuning_job(
             timeout=timeout,
             max_retries=optional_params.max_retries,
             _is_async=_is_async,
-            client=optional_params.client,
+            client=kwargs.get(
+                "client", None
+            ),  # note, when we add this to `GenericLiteLLMParams` it impacts a lot of other tests + linting
         )
     # Azure OpenAI
     elif custom_llm_provider == "azure":
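The change above (repeated in the three hunks below) swaps the typed `optional_params.client` attribute for a plain keyword lookup. A minimal sketch of that `kwargs.get("client", None)` pattern, using a hypothetical `create_job` function rather than litellm's real signature:

from typing import Any, Optional


def create_job(model: str, **kwargs: Any) -> None:
    # dict.get returns None when the caller did not pass a client, so the
    # provider layer can fall back to building its own default client.
    client: Optional[Any] = kwargs.get("client", None)
    if client is None:
        print(f"{model}: no client supplied, falling back to the default")
    else:
        print(f"{model}: using caller-supplied client {client!r}")


create_job("my-model")                   # no client -> default path
create_job("my-model", client=object())  # explicit client wins

The upshot of the diff itself: a caller-supplied client still flows through **kwargs without `client` having to be a declared field on `GenericLiteLLMParams`.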
@@ -389,7 +391,7 @@ def cancel_fine_tuning_job(
             timeout=timeout,
             max_retries=optional_params.max_retries,
             _is_async=_is_async,
-            client=optional_params.client,
+            client=kwargs.get("client", None),
         )
     # Azure OpenAI
     elif custom_llm_provider == "azure":
@@ -552,7 +554,7 @@ def list_fine_tuning_jobs(
             timeout=timeout,
             max_retries=optional_params.max_retries,
             _is_async=_is_async,
-            client=optional_params.client,
+            client=kwargs.get("client", None),
         )
     # Azure OpenAI
     elif custom_llm_provider == "azure":
@@ -704,7 +706,7 @@ def retrieve_fine_tuning_job(
             timeout=timeout,
             max_retries=optional_params.max_retries,
             _is_async=_is_async,
-            client=optional_params.client,
+            client=kwargs.get("client", None),
         )
     # Azure OpenAI
     elif custom_llm_provider == "azure":
@@ -156,19 +156,6 @@ class GenericLiteLLMParams(BaseModel):
     organization: Optional[str] = None  # for openai orgs
     configurable_clientside_auth_params: CONFIGURABLE_CLIENTSIDE_AUTH_PARAMS = None
 
-    # for passing in custom OpenAI / Azure OpenAI clients
-    client: Optional[
-        Union[
-            OpenAI,
-            AsyncOpenAI,
-            AzureOpenAI,
-            AsyncAzureOpenAI,
-            AsyncClient,
-            Client,
-            AsyncHTTPHandler,
-            HTTPHandler,
-        ]
-    ] = None
     ## LOGGING PARAMS ##
     litellm_trace_id: Optional[str] = None
     ## UNIFIED PROJECT/REGION ##
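The note in the first hunk says adding `client` to `GenericLiteLLMParams` "impacts a lot of other tests + linting". One plausible source of that friction, shown here only as a sketch (hypothetical `FakeClient` stand-in, not litellm's actual configuration): pydantic models must opt in to `arbitrary_types_allowed` before they accept fields typed with raw SDK client classes, and every consumer of the model then has to live with that looser typing.

from typing import Optional

from pydantic import BaseModel, ConfigDict


class FakeClient:
    """Stand-in for an SDK client class that pydantic has no schema for."""


class ParamsWithClient(BaseModel):
    # Without this config, defining the class fails with a schema-generation
    # error for the FakeClient-typed field.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    model: str
    client: Optional[FakeClient] = None


params = ParamsWithClient(model="my-model", client=FakeClient())
print(type(params.client).__name__)  # -> FakeClient

Keeping the client in **kwargs instead, as this commit does, avoids declaring that union of client types on the model at all.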