From 95e6d2fbba4edc24738a5d583cfcda08bdb69210 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Thu, 28 Dec 2023 17:09:27 +0530
Subject: [PATCH] (feat) add voyage ai embeddings

---
 litellm/__init__.py             |  1 +
 litellm/main.py                 | 14 ++++++++++++++
 litellm/tests/test_embedding.py | 15 +++++++++++++++
 litellm/utils.py                |  4 ++++
 4 files changed, 34 insertions(+)

diff --git a/litellm/__init__.py b/litellm/__init__.py
index 42c0b49b3..09f7c1cfc 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -389,6 +389,7 @@ provider_list: List = [
     "anyscale",
     "mistral",
     "maritalk",
+    "voyage",
     "custom",  # custom apis
 ]
 
diff --git a/litellm/main.py b/litellm/main.py
index 06b002ed0..7c3d46462 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -2356,6 +2356,20 @@ def embedding(
             client=client,
             aembedding=aembedding,
         )
+    elif custom_llm_provider == "voyage":
+        api_key = api_key or litellm.api_key or get_secret("VOYAGE_API_KEY")
+        response = openai_chat_completions.embedding(
+            model=model,
+            input=input,
+            api_base=api_base,
+            api_key=api_key,
+            logging_obj=logging,
+            timeout=timeout,
+            model_response=EmbeddingResponse(),
+            optional_params=optional_params,
+            client=client,
+            aembedding=aembedding,
+        )
     else:
         args = locals()
         raise ValueError(f"No valid embedding model args passed in - {args}")
diff --git a/litellm/tests/test_embedding.py b/litellm/tests/test_embedding.py
index e77f9417a..a1dfb6ab3 100644
--- a/litellm/tests/test_embedding.py
+++ b/litellm/tests/test_embedding.py
@@ -318,6 +318,21 @@ def test_mistral_embeddings():
 
 # test_mistral_embeddings()
 
+
+def test_voyage_embeddings():
+    try:
+        litellm.set_verbose = True
+        response = litellm.embedding(
+            model="voyage/voyage-01",
+            input=["good morning from litellm"],
+        )
+        print(f"response: {response}")
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
+# test_voyage_embeddings()
+
 # test_sagemaker_embeddings()
 # def local_proxy_embeddings():
 #     litellm.set_verbose=True
diff --git a/litellm/utils.py b/litellm/utils.py
index 45fa56194..8f73105f7 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3751,6 +3751,10 @@ def get_llm_provider(
                 # mistral is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.mistral.ai
                 api_base = "https://api.mistral.ai/v1"
                 dynamic_api_key = get_secret("MISTRAL_API_KEY")
+            elif custom_llm_provider == "voyage":
+                # voyage is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.voyageai.com/v1
+                api_base = "https://api.voyageai.com/v1"
+                dynamic_api_key = get_secret("VOYAGE_API_KEY")
             return model, custom_llm_provider, dynamic_api_key, api_base
     # check if api base is a known openai compatible endpoint
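
---

For reference, a minimal usage sketch of the provider path this patch adds. It assumes a litellm build containing this change and a real Voyage AI key exported as VOYAGE_API_KEY (the env var the patch reads via get_secret); the "voyage/voyage-01" model alias follows the new test case above.

    import os

    import litellm

    # Per this patch, any "voyage/<model>" embedding call is routed through
    # the OpenAI-compatible handler with api_base https://api.voyageai.com/v1.
    os.environ["VOYAGE_API_KEY"] = "..."  # placeholder; set a real key

    response = litellm.embedding(
        model="voyage/voyage-01",
        input=["good morning from litellm"],
    )
    print(response)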