From e037318344b4c7adc9d9c7edc0b39d4a0b0aec8e Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 26 Jul 2023 17:31:25 -0700
Subject: [PATCH] good shit

---
 __init__.py        |   1 +
 completion_test.py |  30 +++++++++++++
 embedding_test.py  |  19 +++++++++
 main.py            | 103 +++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 153 insertions(+)
 create mode 100644 __init__.py
 create mode 100644 completion_test.py
 create mode 100644 embedding_test.py
 create mode 100644 main.py

diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000000..25925d83c8
--- /dev/null
+++ b/__init__.py
@@ -0,0 +1 @@
+from .main import * # Import all the symbols from main.py
\ No newline at end of file
diff --git a/completion_test.py b/completion_test.py
new file mode 100644
index 0000000000..4c552ab673
--- /dev/null
+++ b/completion_test.py
@@ -0,0 +1,30 @@
+from main import completion
+import os
+
+## Configs for Models ##
+# OpenAI Configs
+
+## End Configs for Models ##
+
+
+messages = [{ "content": "Hello, how are you?","role": "user"}]
+# openai call
+response = completion(model="gpt-3.5-turbo", messages=messages)
+print("\nOpenAI call")
+print(response)
+
+# azure openai call
+response = completion("chatgpt-test", messages, azure=True)
+print("\nAzure call")
+print(response)
+
+# text davinci openai call
+response = completion("text-davinci-003", messages)
+print("\nDavinci call")
+print(response)
+
+# cohere call
+response = completion("command-nightly", messages)
+print("\nCohere call")
+print(response)
+
diff --git a/embedding_test.py b/embedding_test.py
new file mode 100644
index 0000000000..e9b085719e
--- /dev/null
+++ b/embedding_test.py
@@ -0,0 +1,19 @@
+from main import embedding
+import os
+
+## Configs for Models ##
+# OpenAI Configs
+
+## End Configs for Models ##
+
+
+input = ["Who is ishaan"]
+# openai call
+response = embedding(model="text-embedding-ada-002", input=input)
+print("\nOpenAI call")
+print(response)
+
+# azure openai call
+response = embedding(model="azure-embedding-mode", input=input, azure=True)
+print("\nAzure OpenAI call")
+print(response)
diff --git a/main.py b/main.py
new file mode 100644
index 0000000000..cbefea5a9c
--- /dev/null
+++ b/main.py
@@ -0,0 +1,103 @@
+import os, openai, cohere
+
+####### COMPLETION MODELS ###################
+open_ai_chat_completion_models = [
+    'gpt-3.5-turbo',
+    'gpt-4'
+]
+open_ai_text_completion_models = [
+    'text-davinci-003'
+]
+
+cohere_models = [
+    'command-nightly',
+]
+
+####### EMBEDDING MODELS ###################
+open_ai_embedding_models = [
+    'text-embedding-ada-002'
+]
+
+#############################################
+
+
+####### COMPLETION ENDPOINTS ################
+#############################################
+def completion(model, messages, azure=False):
+    if azure == True:
+        # azure configs
+        openai.api_type = "azure"
+        openai.api_base = os.environ.get("AZURE_API_BASE")
+        openai.api_version = os.environ.get("AZURE_API_VERSION")
+        openai.api_key = os.environ.get("AZURE_API_KEY")
+        response = openai.ChatCompletion.create(
+            engine=model,
+            messages = messages
+        )
+    elif model in cohere_models:
+        cohere_key = os.environ.get("COHERE_API_KEY")
+        co = cohere.Client(cohere_key)
+        prompt = " ".join([message["content"] for message in messages])
+        response = co.generate(
+            model=model,
+            prompt = prompt
+        )
+        new_response = {
+            "choices": [
+                {
+                    "finish_reason": "stop",
+                    "index": 0,
+                    "message": {
+                        "content": response[0].text,  # text of the first cohere generation
+                        "role": "assistant"
+                    }
+                }
+            ],
+        }
+
+        response = new_response
+
+    elif model in open_ai_chat_completion_models:
+        openai.api_type = "openai"
+        openai.api_base = "https://api.openai.com/v1"
+        openai.api_version = None
+        openai.api_key = os.environ.get("OPENAI_API_KEY")
+        response = openai.ChatCompletion.create(
+            model=model,
+            messages = messages
+        )
+    elif model in open_ai_text_completion_models:
+        openai.api_type = "openai"
+        openai.api_base = "https://api.openai.com/v1"
+        openai.api_version = None
+        openai.api_key = os.environ.get("OPENAI_API_KEY")
+        prompt = " ".join([message["content"] for message in messages])
+        response = openai.Completion.create(
+            model=model,
+            prompt = prompt
+        )
+    return response
+
+
+
+### EMBEDDING ENDPOINTS ####################
+def embedding(model, input=[], azure=False):
+    if azure == True:
+        # azure configs
+        openai.api_type = "azure"
+        openai.api_base = os.environ.get("AZURE_API_BASE")
+        openai.api_version = os.environ.get("AZURE_API_VERSION")
+        openai.api_key = os.environ.get("AZURE_API_KEY")
+        response = openai.Embedding.create(input=input, engine=model)
+    elif model in open_ai_embedding_models:
+        openai.api_type = "openai"
+        openai.api_base = "https://api.openai.com/v1"
+        openai.api_version = None
+        openai.api_key = os.environ.get("OPENAI_API_KEY")
+        response = openai.Embedding.create(input=input, model=model)
+    return response
+
+
+#############################################
+#############################################
+
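
A minimal usage sketch for the new completion and embedding helpers, assuming the placeholder credentials below are replaced with real keys. main.py reads provider credentials from the environment; Azure calls (azure=True) additionally need AZURE_API_BASE, AZURE_API_VERSION, and AZURE_API_KEY.

    # sketch: export credentials, then exercise each provider branch added in main.py
    import os
    os.environ["OPENAI_API_KEY"] = "sk-..."   # placeholder, used by the OpenAI branches
    os.environ["COHERE_API_KEY"] = "..."      # placeholder, only needed for command-nightly

    from main import completion, embedding

    messages = [{"role": "user", "content": "Hello, how are you?"}]
    print(completion("gpt-3.5-turbo", messages))       # OpenAI chat completion branch
    print(completion("command-nightly", messages))     # Cohere branch
    print(embedding("text-embedding-ada-002", input=["Who is ishaan"]))  # embedding branch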