forked from phoenix/litellm-mirror
Merge pull request #346 from WilliamEspegren/main
util: verify_access_key
This commit is contained in:
commit
c30acc67b3
3 changed files with 38 additions and 1 deletions
|
@ -17,6 +17,7 @@ from litellm.utils import (
|
||||||
CustomStreamWrapper,
|
CustomStreamWrapper,
|
||||||
read_config_args,
|
read_config_args,
|
||||||
completion_with_fallbacks,
|
completion_with_fallbacks,
|
||||||
|
verify_access_key,
|
||||||
get_llm_provider
|
get_llm_provider
|
||||||
)
|
)
|
||||||
from .llms import anthropic
|
from .llms import anthropic
|
||||||
|
|
20
litellm/tests/test_verify_openai_key.py
Normal file
20
litellm/tests/test_verify_openai_key.py
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
from litellm import verify_access_key
|
||||||
|
import os
|
||||||
|
|
||||||
|
def test_bad_key():
    """verify_access_key must return False for an invalid API key."""
    key = "bad-key"
    response = verify_access_key(key)
    # A truthy response means the bad key was accepted — that is the failure case.
    if response:
        raise Exception("Bad key was not detected")


test_bad_key()
|
||||||
|
|
||||||
|
def test_good_key():
    """verify_access_key must return True for a valid key taken from the env.

    Raises KeyError if OPENAI_API_KEY is not set in the test environment.
    """
    key = os.environ["OPENAI_API_KEY"]
    response = verify_access_key(key)
    # A falsy response means a known-good key was rejected.
    if not response:
        raise Exception("Good key did not pass")


test_good_key()
|
|
@ -2604,6 +2604,22 @@ def trim_messages(
|
||||||
return messages
|
return messages
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Verify that the user has passed in a valid and active api key
def verify_access_key(access_key: str) -> bool:
    """Check whether *access_key* is a valid, active OpenAI API key.

    Issues a minimal chat-completion request (max_tokens=10) against
    gpt-3.5-turbo and reports whether it succeeded.

    NOTE(review): this mutates the global ``openai.api_key`` as a side
    effect, so it affects any concurrent OpenAI calls in the process.

    :param access_key: the OpenAI API key to probe.
    :return: True when the test request succeeds, False on any error.
    """
    openai.api_key = access_key
    try:
        openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "user", "content": "test"},
            ],
            max_tokens=10,
        )
        return True
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # still propagate instead of being reported as "invalid key".
    except Exception:
        return False
|
||||||
|
|
||||||
# this helper reads the .env and returns a list of supported llms for user
|
# this helper reads the .env and returns a list of supported llms for user
|
||||||
def get_valid_models():
|
def get_valid_models():
|
||||||
try:
|
try:
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue