Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
fix(files/main.py): pass litellm params to azure route
parent 32ec7e27f8
commit 8b09a2721f
3 changed files with 24 additions and 3 deletions
@@ -473,9 +473,11 @@ def file_delete(
     """
     try:
         optional_params = GenericLiteLLMParams(**kwargs)
+        litellm_params_dict = get_litellm_params(**kwargs)
         ### TIMEOUT LOGIC ###
         timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600
         # set timeout for 10 minutes by default
+        client = kwargs.get("client")
 
         if (
             timeout is not None
@@ -549,6 +551,8 @@ def file_delete(
                 timeout=timeout,
                 max_retries=optional_params.max_retries,
                 file_id=file_id,
+                client=client,
+                litellm_params=litellm_params_dict,
             )
         else:
             raise litellm.exceptions.BadRequestError(
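The two hunks above are in file_delete (files/main.py, per the commit title): the first captures litellm_params_dict via get_litellm_params(**kwargs) plus any caller-supplied client from kwargs, and the second forwards both into the Azure delete call. Below is a hedged usage sketch of that flow, not repo code: the env var names, file id, api_version, and the choice of the openai SDK's AzureOpenAI client are assumptions, and it presumes file_delete accepts provider settings and a pre-built client through **kwargs, as the diff implies.

# Hedged sketch, not repo code: exercise the Azure file-delete route so that
# litellm params (metadata, api_base, api_version, ...) and an optional
# pre-built client travel through get_litellm_params(**kwargs) and
# kwargs.get("client") as added above. Env vars, ids, and the client type
# are assumptions.
import os

import litellm
from openai import AzureOpenAI

azure_client = AzureOpenAI(  # optional: reuse one SDK client across calls
    api_key=os.environ["AZURE_API_KEY"],
    azure_endpoint=os.environ["AZURE_API_BASE"],
    api_version="2024-02-15-preview",
)

deleted = litellm.file_delete(
    file_id="file-abc123",            # illustrative file id
    custom_llm_provider="azure",
    api_base=os.environ["AZURE_API_BASE"],
    api_key=os.environ["AZURE_API_KEY"],
    api_version="2024-02-15-preview",
    client=azure_client,              # picked up via kwargs.get("client")
    metadata={"trace_id": "demo"},    # flows through get_litellm_params(**kwargs)
)
print(deleted)

Passing client is optional here; when it is omitted, kwargs.get("client") returns None and the Azure route presumably builds its own SDK client as before.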
@@ -774,8 +778,10 @@ def file_content(
     """
     try:
         optional_params = GenericLiteLLMParams(**kwargs)
+        litellm_params_dict = get_litellm_params(**kwargs)
         ### TIMEOUT LOGIC ###
         timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600
+        client = kwargs.get("client")
         # set timeout for 10 minutes by default
 
         if (
@@ -797,6 +803,7 @@ def file_content(
         )
 
         _is_async = kwargs.pop("afile_content", False) is True
 
         if custom_llm_provider == "openai":
             # for deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there
             api_base = (
@@ -858,6 +865,8 @@ def file_content(
                 timeout=timeout,
                 max_retries=optional_params.max_retries,
                 file_content_request=_file_content_request,
+                client=client,
+                litellm_params=litellm_params_dict,
             )
         else:
             raise litellm.exceptions.BadRequestError(
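The three hunks above make the matching change in file_content: litellm_params_dict and client are captured up front and passed to the Azure file-content call next to the existing timeout and max_retries. A hedged async sketch of the same route follows, with illustrative credentials and file id; afile_content is the async entry point that the diff's kwargs.pop("afile_content", ...) refers to.

# Hedged sketch, not repo code: the async file-content route, which with this
# commit also receives litellm_params and any caller-supplied client.
# Credentials and the file id are illustrative.
import asyncio
import os

import litellm


async def main() -> None:
    content = await litellm.afile_content(
        file_id="file-abc123",          # illustrative file id
        custom_llm_provider="azure",
        api_base=os.environ["AZURE_API_BASE"],
        api_key=os.environ["AZURE_API_KEY"],
        api_version="2024-02-15-preview",
        metadata={"trace_id": "demo"},  # forwarded via get_litellm_params(**kwargs)
    )
    print(content)


asyncio.run(main())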
@@ -3121,13 +3121,18 @@ class Router:
             elif call_type in (
                 "anthropic_messages",
                 "aresponses",
-                "afile_delete",
-                "afile_content",
             ):
                 return await self._ageneric_api_call_with_fallbacks(
                     original_function=original_function,
                     **kwargs,
                 )
+            elif call_type in ("afile_delete", "afile_content"):
+                return await self._ageneric_api_call_with_fallbacks(
+                    original_function=original_function,
+                    custom_llm_provider=custom_llm_provider,
+                    client=client,
+                    **kwargs,
+                )
 
         return async_wrapper
 
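The Router hunk above gives "afile_delete" and "afile_content" their own branch in the async wrapper (class Router) so that custom_llm_provider and client are forwarded explicitly to _ageneric_api_call_with_fallbacks instead of riding along only in **kwargs. The standalone sketch below reproduces just that dispatch pattern; it is not litellm's actual Router API, and every name in it is illustrative.

# Standalone sketch, not litellm's actual Router API: the dispatch pattern the
# hunk above introduces. File call types get their own branch so the provider
# and client are threaded through explicitly instead of being left in **kwargs.
import asyncio
from typing import Any, Callable, Optional


async def _ageneric_api_call_with_fallbacks(original_function: Callable, **kwargs: Any) -> Any:
    # Stand-in for Router._ageneric_api_call_with_fallbacks.
    return await original_function(**kwargs)


def make_async_wrapper(
    original_function: Callable,
    call_type: str,
    custom_llm_provider: Optional[str] = None,
    client: Optional[Any] = None,
) -> Callable:
    async def async_wrapper(**kwargs: Any) -> Any:
        if call_type in ("anthropic_messages", "aresponses"):
            return await _ageneric_api_call_with_fallbacks(
                original_function=original_function, **kwargs
            )
        elif call_type in ("afile_delete", "afile_content"):
            # New branch: forward the provider and client explicitly.
            return await _ageneric_api_call_with_fallbacks(
                original_function=original_function,
                custom_llm_provider=custom_llm_provider,
                client=client,
                **kwargs,
            )
        raise ValueError(f"unsupported call_type: {call_type}")

    return async_wrapper


async def fake_afile_content(**kwargs: Any) -> dict:
    return {"file_id": kwargs.get("file_id"), "provider": kwargs.get("custom_llm_provider")}


wrapper = make_async_wrapper(fake_afile_content, "afile_content", custom_llm_provider="azure")
print(asyncio.run(wrapper(file_id="file-123")))
# -> {'file_id': 'file-123', 'provider': 'azure'}

Run as-is, the sketch prints the provider alongside the file id, which is the point of the dedicated branch: the provider reaches the downstream call even when the caller never put it in kwargs.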
@@ -33,7 +33,6 @@ def setup_mocks():
     ) as mock_logger, patch(
         "litellm.llms.azure.common_utils.select_azure_base_url_or_endpoint"
     ) as mock_select_url:
-
         # Configure mocks
         mock_litellm.AZURE_DEFAULT_API_VERSION = "2023-05-15"
         mock_litellm.enable_azure_ad_token_refresh = False
@@ -303,6 +302,14 @@ async def test_ensure_initialize_azure_sdk_client_always_used(call_type):
             "file": MagicMock(),
             "purpose": "assistants",
         },
+        "afile_content": {
+            "custom_llm_provider": "azure",
+            "file_id": "123",
+        },
+        "afile_delete": {
+            "custom_llm_provider": "azure",
+            "file_id": "123",
+        },
     }
 
     # Get appropriate input for this call type
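The test hunks above register the two Azure file routes in the call-type-to-kwargs table used by test_ensure_initialize_azure_sdk_client_always_used. A hedged sketch of the same table-driven pattern follows; the "acreate_file" key and the assertion body are illustrative, not the repo's exact fixture.

# Hedged sketch of the table-driven test pattern above; key names other than
# "afile_content"/"afile_delete" and the test body are illustrative.
from unittest.mock import MagicMock

import pytest

CALL_TYPE_KWARGS = {
    "acreate_file": {"file": MagicMock(), "purpose": "assistants"},  # assumed key name
    "afile_content": {"custom_llm_provider": "azure", "file_id": "123"},
    "afile_delete": {"custom_llm_provider": "azure", "file_id": "123"},
}


@pytest.mark.parametrize("call_type", sorted(CALL_TYPE_KWARGS))
def test_call_type_kwargs_cover_azure_file_routes(call_type):
    # Each entry should carry enough arguments for the Azure SDK-client
    # initialization path to be exercised for that call type.
    kwargs = CALL_TYPE_KWARGS[call_type]
    assert kwargs, f"no kwargs configured for {call_type}"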