From 8b408bb83b700a5e3246962e5ebdf0a9e3d76fe6 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Mon, 14 Aug 2023 17:17:18 -0700
Subject: [PATCH] allow setting litellm.api_key

---
 litellm/__init__.py | 1 +
 litellm/utils.py    | 4 ++--
 pyproject.toml      | 2 +-
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/litellm/__init__.py b/litellm/__init__.py
index e3ce5fbec9..6b82406781 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -5,6 +5,7 @@ set_verbose=False
 telemetry=True
 max_tokens = 256 # OpenAI Defaults
 retry = True
+api_key = None
 openai_key = None
 azure_key = None
 anthropic_key = None
diff --git a/litellm/utils.py b/litellm/utils.py
index f57c390cbe..662e70a9cc 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -717,10 +717,10 @@ def get_secret(secret_name):
             # if secret manager fails default to using .env variables
             os.environ[secret_name] = secret # set to env to be safe
             return secret
+        elif litellm.api_key != None: # if users use litellm default key
+            return litellm.api_key
         else:
             return os.environ.get(secret_name)
-    else:
-        return os.environ.get(secret_name)
 
 ######## Streaming Class ############################
 # wraps the completion stream to return the correct format for the model
diff --git a/pyproject.toml b/pyproject.toml
index 2235a6da06..df6f809daf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.389"
+version = "0.1.390"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
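
Usage sketch (reviewer note, not part of the commit): the new litellm.api_key module attribute acts as a default key that the patched get_secret() can return when it is set, rather than reading a provider-specific environment variable. The snippet below shows how a caller might set it; it assumes the standard litellm.completion entry point, an arbitrary example model and message, and a placeholder key value.

import litellm
from litellm import completion

# One shared default key; per this patch, get_secret() can return it when set
# instead of falling back to an environment variable lookup.
litellm.api_key = "sk-..."  # placeholder value, not a real key

# Illustrative call; the model name and message are arbitrary examples.
response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hey, how's it going?"}],
)
print(response)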