mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)

add test for ft endpoints on azure

parent 91c787a77b
commit 7cded198c2

4 changed files with 74 additions and 5 deletions
@@ -34,6 +34,34 @@ from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
 
 router = APIRouter()
 
+files_config = None
+
+
+def set_files_config(config):
+    global files_config
+    if not isinstance(config, list):
+        raise ValueError("invalid files config, expected a list")
+
+    for element in config:
+        if isinstance(element, dict):
+            for key, value in element.items():
+                if isinstance(value, str) and value.startswith("os.environ/"):
+                    element[key] = litellm.get_secret(value)
+
+    files_config = config
+
+
+def get_files_provider_config(
+    custom_llm_provider: str,
+):
+    global files_config
+    if files_config is None:
+        raise ValueError("files_config is not set, set it on your config.yaml file.")
+    for setting in files_config:
+        if setting.get("custom_llm_provider") == custom_llm_provider:
+            return setting
+    return None
+
+
 @router.post(
     "/v1/files",
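The two helpers added above are small enough to exercise directly. A minimal sketch, assuming the module path litellm.proxy.openai_files_endpoints.files_endpoints used by the proxy; the provider entry and environment variable name are placeholders, not taken from this commit:

import os

from litellm.proxy.openai_files_endpoints import files_endpoints

# Placeholder secret so the "os.environ/..." lookup resolves in this sketch.
os.environ["AZURE_API_KEY_FT"] = "my-azure-key"

# One entry per provider; string values starting with "os.environ/" are
# replaced with the resolved secret by set_files_config().
files_endpoints.set_files_config(
    [
        {"custom_llm_provider": "azure", "api_key": "os.environ/AZURE_API_KEY_FT"},
    ]
)

# Returns the matching entry (with the secret already resolved), or None.
azure_cfg = files_endpoints.get_files_provider_config(custom_llm_provider="azure")
print(azure_cfg)  # {'custom_llm_provider': 'azure', 'api_key': 'my-azure-key'}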
@@ -49,6 +77,7 @@ async def create_file(
     request: Request,
     fastapi_response: Response,
     purpose: str = Form(...),
+    custom_llm_provider: str = Form(...),
     file: UploadFile = File(...),
     user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
 ):
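With the extra form field in place, a client sends custom_llm_provider alongside purpose and the uploaded file. A hedged sketch of such a call using the requests library; the proxy URL, virtual key, and file name are placeholders for whatever your deployment uses:

import requests

# Placeholders: adjust the proxy URL, key, and training file to your setup.
with open("training_data.jsonl", "rb") as f:
    resp = requests.post(
        "http://localhost:4000/v1/files",
        headers={"Authorization": "Bearer sk-1234"},
        data={"purpose": "fine-tune", "custom_llm_provider": "azure"},
        files={"file": f},
    )

print(resp.status_code, resp.json())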
@@ -100,11 +129,17 @@ async def create_file(
 
         _create_file_request = CreateFileRequest(file=file_data, **data)
 
-        # for now use custom_llm_provider=="openai" -> this will change as LiteLLM adds more providers for acreate_batch
-        response = await litellm.acreate_file(
-            custom_llm_provider="openai", **_create_file_request
-        )
+        # get configs for custom_llm_provider
+        llm_provider_config = get_files_provider_config(
+            custom_llm_provider=custom_llm_provider
+        )
+
+        # add llm_provider_config to data
+        _create_file_request.update(llm_provider_config)
+
+        # for now use custom_llm_provider=="openai" -> this will change as LiteLLM adds more providers for acreate_batch
+        response = await litellm.acreate_file(**_create_file_request)
 
         ### ALERTING ###
         asyncio.create_task(
             proxy_logging_obj.update_request_status(
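The merge step in the hunk above is plain dict semantics: the diff calls .update() on the CreateFileRequest, so at runtime it behaves like a dict, and whatever the matching files config entry carries is folded into the kwargs that are finally splatted into litellm.acreate_file. A sketch with illustrative keys and values, none of them taken from this commit:

# What the endpoint builds from the multipart request (values are placeholders).
_create_file_request = {
    "purpose": "fine-tune",
    "file": ("training_data.jsonl", b"{...}"),
}

# What get_files_provider_config() returned for custom_llm_provider="azure".
llm_provider_config = {
    "custom_llm_provider": "azure",
    "api_key": "my-azure-key",
    "api_base": "https://my-endpoint.openai.azure.com",
}

# Merge provider settings into the request; the proxy then calls
# litellm.acreate_file(**_create_file_request) with the combined kwargs.
_create_file_request.update(llm_provider_config)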