Merge pull request #346 from WilliamEspegren/main

util: verify_access_key
Ishaan Jaff 2023-09-12 11:44:49 -07:00 committed by GitHub
commit c30acc67b3
3 changed files with 38 additions and 1 deletion

@@ -17,6 +17,7 @@ from litellm.utils import (
    CustomStreamWrapper,
    read_config_args,
    completion_with_fallbacks,
    verify_access_key,
    get_llm_provider
)
from .llms import anthropic

@@ -0,0 +1,20 @@
from litellm import verify_access_key
import os

def test_bad_key():
    key = "bad-key"
    response = verify_access_key(key)
    if response == False:
        pass
    else:
        raise Exception("Bad key was not detected")
test_bad_key()

def test_good_key():
    key = os.environ['OPENAI_API_KEY']
    response = verify_access_key(key)
    if response == True:
        pass
    else:
        raise Exception("Good key did not pass")
test_good_key()

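The two checks above run at import time when the test module is loaded. As a sketch only, the same assertions could be written in pytest style (pytest and a populated OPENAI_API_KEY environment variable are assumptions, not part of this commit):

import os
from litellm import verify_access_key

def test_bad_key_is_rejected():
    # An obviously invalid key should make the underlying OpenAI test call fail.
    assert verify_access_key("bad-key") is False

def test_good_key_is_accepted():
    # Requires a real, active key in the environment.
    assert verify_access_key(os.environ["OPENAI_API_KEY"]) is True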

@@ -2604,6 +2604,22 @@ def trim_messages(
    return messages

# Verify that the user has passed in a valid and active api key
def verify_access_key(access_key:str):
    openai.api_key = access_key
    try:
        test_response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "user", "content": "test"},
            ],
            max_tokens = 10
        )
        return True
    except:
        return False

# this helper reads the .env and returns a list of supported llms for user
def get_valid_models():
    try:
@@ -2632,4 +2648,4 @@ def get_valid_models():
            valid_models.extend(models_for_provider)
        return valid_models
    except:
        return [] # NON-Blocking
        return [] # NON-Blocking
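
For reference, a minimal usage sketch of the new helper from application code. It assumes litellm is installed and that the key to check is available in the OPENAI_API_KEY environment variable; those details are illustrative, not part of this commit:

import os
from litellm import verify_access_key

# True only if OpenAI accepts the key for a tiny test completion;
# any failure (invalid key, network error, etc.) comes back as False.
key_is_valid = verify_access_key(os.environ["OPENAI_API_KEY"])
print("key usable:", key_is_valid)

Note that verify_access_key validates the key by issuing a real gpt-3.5-turbo request capped at 10 tokens, so each check consumes a small amount of quota.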