fix(proxy_server.py): If master key is set, only master key can be used to generate new keys

This commit is contained in:
Krrish Dholakia 2023-11-22 10:18:28 -08:00
parent 10fe16c965
commit 604ad41eac
2 changed files with 15 additions and 5 deletions

View file

@@ -164,8 +164,14 @@ async def user_api_key_auth(request: Request):
return
try:
api_key = await oauth2_scheme(request=request)
route = request.url.path
if api_key == master_key:
return
if route == "/key/generate" and api_key != master_key:
raise Exception(f"If master key is set, only master key can be used to generate new keys")
if api_key in config_cache:
llm_model_list = config_cache[api_key].get("model_list", [])
return
@@ -593,6 +599,11 @@ async def generate_key_fn(request: Request):
detail={"error": "models param must be a list"},
)
@router.get("/test")
async def test_endpoint(request: Request):
    """Simple liveness/debug endpoint: echoes back the path that was requested."""
    requested_path = request.url.path
    return {"route": requested_path}
#### EXPERIMENTAL QUEUING ####
@router.post("/queue/request", dependencies=[Depends(user_api_key_auth)])
async def async_queue_request(request: Request):
global celery_fn, llm_model_list

View file

@@ -164,9 +164,7 @@ def test_completion_gpt4_vision():
def test_completion_perplexity_api():
try:
litellm.set_verbose=True
litellm.num_retries = 0
litellm.drop_params = True
# litellm.set_verbose=True
messages=[{
"role": "system",
"content": "You're a good bot"
@@ -180,7 +178,7 @@ def test_completion_perplexity_api():
response = completion(
model="mistral-7b-instruct",
messages=messages,
api_base="https://api.perplexity.ai", stop="Hello")
api_base="https://api.perplexity.ai")
print(response)
except Exception as e:
pytest.fail(f"Error occurred: {e}")
@@ -446,7 +444,7 @@ def test_completion_openai():
pass
except Exception as e:
pytest.fail(f"Error occurred: {e}")
test_completion_openai()
# test_completion_openai()
def test_completion_text_openai():
try:
@@ -832,6 +830,7 @@ def test_completion_replicate_llama2_stream():
# assert len(chunk.choices[0].delta["content"]) > 2
# print(chunk)
assert len(complete_response) > 5
print(f"complete_response: {complete_response}")
except Exception as e:
pytest.fail(f"Error occurred: {e}")
test_completion_replicate_llama2_stream()