forked from phoenix/litellm-mirror
fix(proxy_server.py): support checking openai user param
This commit is contained in:
parent
7924700df6
commit
b3493269b3
2 changed files with 41 additions and 5 deletions
|
@@ -169,11 +169,43 @@ If any call is made to proxy with this user id, it'll be rejected - use this if
|
||||||
```yaml
|
```yaml
|
||||||
litellm_settings:
|
litellm_settings:
|
||||||
callbacks: ["blocked_user_check"]
|
callbacks: ["blocked_user_check"]
|
||||||
blocked_user_id_list: ["user_id_1", "user_id_2", ...] # can also be a .txt filepath e.g. `/relative/path/blocked_list.txt`
|
blocked_user_list: ["user_id_1", "user_id_2", ...] # can also be a .txt filepath e.g. `/relative/path/blocked_list.txt`
|
||||||
```
|
```
|
||||||
|
|
||||||
### How to test
|
### How to test
|
||||||
|
|
||||||
|
<Tabs>
|
||||||
|
|
||||||
|
|
||||||
|
<TabItem value="openai" label="OpenAI Python v1.0.0+">
|
||||||
|
|
||||||
|
Set `user=<user_id>` to the user id of the user who might have opted out.
|
||||||
|
|
||||||
|
```python
|
||||||
|
import openai
|
||||||
|
client = openai.OpenAI(
|
||||||
|
api_key="sk-1234",
|
||||||
|
base_url="http://0.0.0.0:4000"
|
||||||
|
)
|
||||||
|
|
||||||
|
# request sent to model set on litellm proxy, `litellm --model`
|
||||||
|
response = client.chat.completions.create(
|
||||||
|
model="gpt-3.5-turbo",
|
||||||
|
messages = [
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "this is a test request, write a short poem"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
user="user_id_1"
|
||||||
|
)
|
||||||
|
|
||||||
|
print(response)
|
||||||
|
```
|
||||||
|
</TabItem>
|
||||||
|
|
||||||
|
<TabItem value="Curl" label="Curl Request">
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
curl --location 'http://0.0.0.0:4000/chat/completions' \
|
curl --location 'http://0.0.0.0:4000/chat/completions' \
|
||||||
--header 'Content-Type: application/json' \
|
--header 'Content-Type: application/json' \
|
||||||
|
@@ -185,11 +217,14 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \
|
||||||
"content": "what llm are you"
|
"content": "what llm are you"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"user_id": "user_id_1" # this is also an openai supported param
|
"user": "user_id_1" # this is also an openai supported param
|
||||||
}
|
}
|
||||||
'
|
'
|
||||||
```
|
```
|
||||||
|
|
||||||
|
</TabItem>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
:::info
|
:::info
|
||||||
|
|
||||||
[Suggest a way to improve this](https://github.com/BerriAI/litellm/issues/new/choose)
|
[Suggest a way to improve this](https://github.com/BerriAI/litellm/issues/new/choose)
|
||||||
|
|
|
@@ -66,12 +66,13 @@ class _ENTERPRISE_BlockedUserList(CustomLogger):
|
||||||
- check if user id part of blocked list
|
- check if user id part of blocked list
|
||||||
"""
|
"""
|
||||||
self.print_verbose(f"Inside Blocked User List Pre-Call Hook")
|
self.print_verbose(f"Inside Blocked User List Pre-Call Hook")
|
||||||
if "user_id" in data:
|
if "user_id" in data or "user" in data:
|
||||||
if data["user_id"] in self.blocked_user_list:
|
user = data.get("user_id", data.get("user", ""))
|
||||||
|
if user in self.blocked_user_list:
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=400,
|
status_code=400,
|
||||||
detail={
|
detail={
|
||||||
"error": f"User blocked from making LLM API Calls. User={data['user_id']}"
|
"error": f"User blocked from making LLM API Calls. User={user}"
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
except HTTPException as e:
|
except HTTPException as e:
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue