From 1bc2a6d5cc0b6e6cedd178fb51df50ebc8ffc18e Mon Sep 17 00:00:00 2001 From: ishaan-jaff Date: Sat, 2 Sep 2023 17:49:25 -0700 Subject: [PATCH] thread safe version, key and base for openai --- litellm/main.py | 32 +++++++++++++------------------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/litellm/main.py b/litellm/main.py index ed8741d95..500219c26 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -87,6 +87,7 @@ def completion( *, return_async=False, api_key=None, + api_version=None, force_timeout=600, logger_fn=None, verbose=False, @@ -198,7 +199,9 @@ def completion( ) else: response = openai.ChatCompletion.create( - engine=model, messages=messages, **optional_params + engine=model, + messages=messages, + **optional_params ) if "stream" in optional_params and optional_params["stream"] == True: response = CustomStreamWrapper(response, model, logging_obj=logging) @@ -228,7 +231,6 @@ def completion( or get_secret("OPENAI_API_BASE") or "https://api.openai.com/v1" ) - openai.api_version = None if litellm.organization: openai.organization = litellm.organization # set API KEY @@ -237,8 +239,6 @@ def completion( elif not api_key and get_secret("OPENAI_API_KEY"): api_key = get_secret("OPENAI_API_KEY") - openai.api_key = api_key - ## LOGGING logging.pre_call( input=messages, @@ -247,21 +247,15 @@ def completion( ) ## COMPLETION CALL try: - if litellm.headers: - response = openai.ChatCompletion.create( - model=model, - messages=messages, - headers=litellm.headers, - api_base=api_base, - **optional_params, - ) - else: - response = openai.ChatCompletion.create( - model=model, - messages=messages, - api_base=api_base, # thread safe setting of api_base - **optional_params - ) + response = openai.ChatCompletion.create( + model=model, + messages=messages, + headers=litellm.headers, # None by default + api_base=api_base, # thread safe setting base, key, api_version + api_key=api_key, + api_version=api_version, # default None + **optional_params, + ) except 
Exception as e: ## LOGGING - log the original exception returned logging.post_call(