Litellm dev 12 27 2024 p2 1 (#7449)

* fix(azure_ai/transformation.py): route ai.services.azure calls to the azure provider route

requires token to be passed in as 'api-key'

Closes https://github.com/BerriAI/litellm/issues/7275

* fix(key_management_endpoints.py): enforce user is member of team, if team_id set and team_id exists in team table

* fix(key_management_endpoints.py): handle assigned_user_id = None

* feat(create_key_button.tsx): allow assigning keys to other users

allows proxy admin to easily assign other people keys

* build(create_key_button.tsx): fix error message display

don't swallow the error message for key creation failure

* build(create_key_button.tsx): allow proxy admin to edit team id

* build(create_key_button.tsx): allow proxy admin to assign keys to other users

* build(edit_user.tsx): clarify how 'user budgets' are applied

* test: remove dup test

* fix(key_management_endpoints.py): don't raise error if team not in db

* test: fix test
This commit is contained in:
Krish Dholakia 2024-12-27 20:02:32 -08:00 committed by GitHub
parent 67b39bacf7
commit 0924df4971
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 92 additions and 42 deletions

View file

@@ -51,7 +51,7 @@ class AzureAIStudioConfig(OpenAIConfig):
message["content"] = texts
return messages
def _is_azure_openai_model(self, model: str) -> bool:
def _is_azure_openai_model(self, model: str, api_base: Optional[str]) -> bool:
try:
if "/" in model:
model = model.split("/", 1)[1]
@@ -61,6 +61,9 @@ class AzureAIStudioConfig(OpenAIConfig):
or model in litellm.open_ai_embedding_models
):
return True
if api_base and "services.ai.azure" in api_base:
return True
except Exception:
return False
return False
@@ -75,7 +78,7 @@ class AzureAIStudioConfig(OpenAIConfig):
api_base = api_base or get_secret_str("AZURE_AI_API_BASE")
dynamic_api_key = api_key or get_secret_str("AZURE_AI_API_KEY")
if self._is_azure_openai_model(model=model):
if self._is_azure_openai_model(model=model, api_base=api_base):
verbose_logger.debug(
"Model={} is Azure OpenAI model. Setting custom_llm_provider='azure'.".format(
model

View file

@@ -94,29 +94,46 @@ def _is_allowed_to_create_key(
def _team_key_generation_team_member_check(
assigned_user_id: Optional[str],
team_table: LiteLLM_TeamTableCachedObj,
user_api_key_dict: UserAPIKeyAuth,
team_key_generation: Optional[TeamUIKeyGenerationConfig],
team_key_generation: TeamUIKeyGenerationConfig,
):
if (
team_key_generation is None
or "allowed_team_member_roles" not in team_key_generation
):
return True
if assigned_user_id is not None:
key_assigned_user_in_team = _get_user_in_team(
team_table=team_table, user_id=assigned_user_id
)
user_in_team = _get_user_in_team(
if key_assigned_user_in_team is None:
raise HTTPException(
status_code=400,
detail=f"User={assigned_user_id} not assigned to team={team_table.team_id}",
)
key_creating_user_in_team = _get_user_in_team(
team_table=team_table, user_id=user_api_key_dict.user_id
)
if user_in_team is None:
is_admin = (
user_api_key_dict.user_role is not None
and user_api_key_dict.user_role == LitellmUserRoles.PROXY_ADMIN.value
)
if is_admin:
return True
elif key_creating_user_in_team is None:
raise HTTPException(
status_code=400,
detail=f"User={user_api_key_dict.user_id} not assigned to team={team_table.team_id}",
)
if user_in_team.role not in team_key_generation["allowed_team_member_roles"]:
elif (
"allowed_team_member_roles" in team_key_generation
and key_creating_user_in_team.role
not in team_key_generation["allowed_team_member_roles"]
):
raise HTTPException(
status_code=400,
detail=f"Team member role {user_in_team.role} not in allowed_team_member_roles={team_key_generation['allowed_team_member_roles']}",
detail=f"Team member role {key_creating_user_in_team.role} not in allowed_team_member_roles={team_key_generation['allowed_team_member_roles']}",
)
return True
@@ -143,14 +160,17 @@ def _team_key_generation_check(
data: GenerateKeyRequest,
):
if (
litellm.key_generation_settings is None
or litellm.key_generation_settings.get("team_key_generation") is None
litellm.key_generation_settings is not None
and "team_key_generation" in litellm.key_generation_settings
):
return True
_team_key_generation = litellm.key_generation_settings["team_key_generation"] # type: ignore
_team_key_generation = litellm.key_generation_settings["team_key_generation"]
else:
_team_key_generation = TeamUIKeyGenerationConfig(
allowed_team_member_roles=["admin", "member"],
)
_team_key_generation_team_member_check(
assigned_user_id=data.user_id,
team_table=team_table,
user_api_key_dict=user_api_key_dict,
team_key_generation=_team_key_generation,
@@ -215,21 +235,17 @@ def key_generation_check(
"""
Check if admin has restricted key creation to certain roles for teams or individuals
"""
if (
litellm.key_generation_settings is None
or user_api_key_dict.user_role == LitellmUserRoles.PROXY_ADMIN.value
):
return True
## check if key is for team or individual
is_team_key = _is_team_key(data=data)
if is_team_key:
if team_table is None:
if team_table is None and litellm.key_generation_settings is not None:
raise HTTPException(
status_code=400,
detail=f"Unable to find team object in database. Team ID: {data.team_id}",
)
elif team_table is None:
return True # assume user is assigning team_id without using the team table
return _team_key_generation_check(
team_table=team_table,
user_api_key_dict=user_api_key_dict,
@@ -332,21 +348,26 @@ async def generate_key_fn( # noqa: PLR0915
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN, detail=message
)
elif litellm.key_generation_settings is not None:
if data.team_id is None:
team_table: Optional[LiteLLM_TeamTableCachedObj] = None
else:
team_table: Optional[LiteLLM_TeamTableCachedObj] = None
if data.team_id is not None:
try:
team_table = await get_team_object(
team_id=data.team_id,
prisma_client=prisma_client,
user_api_key_cache=user_api_key_cache,
parent_otel_span=user_api_key_dict.parent_otel_span,
)
key_generation_check(
team_table=team_table,
user_api_key_dict=user_api_key_dict,
data=data,
)
except Exception as e:
verbose_proxy_logger.debug(
f"Error getting team object in `/key/generate`: {e}"
)
team_table = None
key_generation_check(
team_table=team_table,
user_api_key_dict=user_api_key_dict,
data=data,
)
try:
_is_allowed_to_create_key(

View file

@@ -191,3 +191,12 @@ def test_get_llm_provider_watson_text():
)
assert custom_llm_provider == "watsonx_text"
assert model == "watson-text-to-speech"
def test_azure_global_standard_get_llm_provider():
model, custom_llm_provider, dynamic_api_key, api_base = litellm.get_llm_provider(
model="azure_ai/gpt-4o-global-standard",
api_base="https://my-deployment-francecentral.services.ai.azure.com/models/chat/completions?api-version=2024-05-01-preview",
api_key="fake-api-key",
)
assert custom_llm_provider == "azure"

View file

@@ -125,6 +125,7 @@ const CreateKey: React.FC<CreateKeyProps> = ({
try {
const newKeyAlias = formValues?.key_alias ?? "";
const newKeyTeamId = formValues?.team_id ?? null;
const existingKeyAliases =
data
?.filter((k) => k.team_id === newKeyTeamId)
@@ -163,8 +164,8 @@ const CreateKey: React.FC<CreateKeyProps> = ({
form.resetFields();
localStorage.removeItem("userData" + userID);
} catch (error) {
console.error("Error creating the key:", error);
message.error(`Error creating the key: ${error}`, 20);
console.log("error in create key:", error);
message.error(`Error creating the key: ${error}`);
}
};
@@ -224,11 +225,27 @@ const CreateKey: React.FC<CreateKeyProps> = ({
>
<Radio value="you">You</Radio>
<Radio value="service_account">Service Account</Radio>
{userRole === "Admin" && <Radio value="another_user">Another User</Radio>}
</Radio.Group>
</Form.Item>
<Form.Item
label={keyOwner === "you" ? "Key Name" : "Service Account ID"}
label="User ID"
name="user_id"
hidden={keyOwner !== "another_user"}
valuePropName="user_id"
className="mt-8"
rules={[{ required: keyOwner === "another_user", message: `Please input the user ID of the user you are assigning the key to` }]}
help={"Get User ID - Click on the 'Users' tab in the sidebar."}
>
<TextInput
placeholder="User ID"
onChange={(e) => form.setFieldValue('user_id', e.target.value)}
/>
</Form.Item>
<Form.Item
label={keyOwner === "you" || keyOwner === "another_user" ? "Key Name" : "Service Account ID"}
name="key_alias"
rules={[{ required: true, message: `Please input a ${keyOwner === "you" ? "key name" : "service account ID"}` }]}
help={keyOwner === "you" ? "required" : "IDs can include letters, numbers, and hyphens"}
@@ -238,12 +255,12 @@ const CreateKey: React.FC<CreateKeyProps> = ({
<Form.Item
label="Team ID"
name="team_id"
hidden={true}
hidden={keyOwner !== "another_user"}
initialValue={team ? team["team_id"] : null}
valuePropName="team_id"
className="mt-8"
>
<Input value={team ? team["team_alias"] : ""} disabled />
<TextInput defaultValue={team ? team["team_id"] : null} onChange={(e) => form.setFieldValue('team_id', e.target.value)}/>
</Form.Item>
<Form.Item

View file

@@ -110,6 +110,7 @@ const EditUserModal: React.FC<EditUserModalProps> = ({ visible, possibleUIRoles,
label="Spend (USD)"
name="spend"
tooltip="(float) - Spend of all LLM calls completed by this user"
help="Across all keys (including keys with team_id)."
>
<InputNumber min={0} step={1} />
</Form.Item>
@@ -118,6 +119,7 @@ const EditUserModal: React.FC<EditUserModalProps> = ({ visible, possibleUIRoles,
label="User Budget (USD)"
name="max_budget"
tooltip="(float) - Maximum budget of this user"
help="Ignored if the key has a team_id; team budget applies there."
>
<InputNumber min={0} step={1} />
</Form.Item>

View file

@@ -31,8 +31,6 @@ const handleError = async (errorData: string) => {
await sleep(3000); // 3 second sleep
document.cookie = "token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;";
window.location.href = baseUrl;
} else {
message.error(errorData);
}
lastErrorTime = currentTime;
} else {
@@ -450,7 +448,7 @@ export const keyCreateCall = async (
const errorData = await response.text();
handleError(errorData);
console.error("Error response from the server:", errorData);
throw new Error("Network response was not ok");
throw new Error(errorData);
}
const data = await response.json();