(feat) proxy - save model access requests

This commit is contained in:
ishaan-jaff 2024-02-19 16:35:20 -08:00
parent 54e9b5b99a
commit a13565dd95
4 changed files with 82 additions and 1 deletions

View file

@@ -4122,6 +4122,58 @@ async def user_update(data: UpdateUserRequest):
)
@router.post(
    "/user/request_model",
    tags=["user management"],
    dependencies=[Depends(user_api_key_auth)],
)
async def user_request_model(request: Request):
    """
    Allow a user to create a request to access a model.

    Expects a JSON body with:
        models: list of model names the user wants access to
        user_id: id of the requesting user
        justification: free-text reason access is needed

    Stores one row in the model-request table keyed by a fresh uuid4 and
    returns {"status": "success"}. Raises ProxyException on any failure.
    """
    global prisma_client
    try:
        data_json = await request.json()
        if prisma_client is None:
            raise Exception("Not connected to DB!")
        # drop keys the caller left unset / explicitly null
        non_default_values = {k: v for k, v in data_json.items() if v is not None}
        new_models = non_default_values.get("models", None)
        user_id = non_default_values.get("user_id", None)
        justification = non_default_values.get("justification", None)
        # all three columns are non-nullable in the LiteLLM_ModelRequests
        # schema — fail fast with a clear 400 instead of an opaque DB error
        if new_models is None or user_id is None or justification is None:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="`models`, `user_id` and `justification` are required fields",
            )
        response = await prisma_client.insert_data(
            data={
                "models": new_models,
                "justification": justification,
                "user_id": user_id,
                "request_id": str(uuid.uuid4()),  # unique row key
            },
            table_name="model_request",
        )
        return {"status": "success"}
    except Exception as e:
        traceback.print_exc()
        # re-wrap everything as ProxyException, preserving HTTP details when present
        if isinstance(e, HTTPException):
            raise ProxyException(
                message=getattr(e, "detail", f"Authentication Error({str(e)})"),
                type="auth_error",
                param=getattr(e, "param", "None"),
                code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST),
            )
        elif isinstance(e, ProxyException):
            raise e
        raise ProxyException(
            message="Authentication Error, " + str(e),
            type="auth_error",
            param=getattr(e, "param", "None"),
            code=status.HTTP_400_BAD_REQUEST,
        )
#### TEAM MANAGEMENT ####

View file

@@ -95,4 +95,12 @@ model LiteLLM_SpendLogs {
cache_hit String @default("")
cache_key String @default("")
request_tags Json @default("[]")
}
// Beta - allow team members to request access to a model
// One row per access request; rows are written with a server-generated
// uuid4 request_id (see /user/request_model handler).
model LiteLLM_ModelRequests {
request_id String @unique // uuid4, unique key used for upsert lookups
user_id String // id of the user asking for access
models String[] // model names being requested
justification String // free-text reason supplied by the user
}

View file

@@ -696,7 +696,9 @@ class PrismaClient:
on_backoff=on_backoff, # specifying the function to call on backoff
)
async def insert_data(
self, data: dict, table_name: Literal["user", "key", "config", "spend", "team"]
self,
data: dict,
table_name: Literal["user", "key", "config", "spend", "team", "model_request"],
):
"""
Add a key to the database. If it already exists, do nothing.
@@ -778,6 +780,17 @@
)
verbose_proxy_logger.info(f"Data Inserted into Spend Table")
return new_spend_row
elif table_name == "model_request":
db_data = self.jsonify_object(data=data)
new_model_request_row = await self.db.litellm_modelrequests.upsert(
where={"request_id": data["request_id"]},
data={
"create": {**db_data}, # type: ignore
"update": {}, # don't do anything if it already exists
},
)
verbose_proxy_logger.info(f"Data Inserted into Model Request Table")
return new_model_request_row
except Exception as e:
print_verbose(f"LiteLLM Prisma Client Exception: {e}")

View file

@@ -95,4 +95,12 @@ model LiteLLM_SpendLogs {
cache_hit String @default("")
cache_key String @default("")
request_tags Json @default("[]")
}
// Beta - allow team members to request access to a model
// One row per access request; rows are written with a server-generated
// uuid4 request_id (see /user/request_model handler).
model LiteLLM_ModelRequests {
request_id String @unique // uuid4, unique key used for upsert lookups
user_id String // id of the user asking for access
models String[] // model names being requested
justification String // free-text reason supplied by the user
}