(feat) add voyage ai embeddings
parent cbcf406fd0
commit 95e6d2fbba
4 changed files with 34 additions and 0 deletions
@@ -389,6 +389,7 @@ provider_list: List = [
     "anyscale",
     "mistral",
     "maritalk",
+    "voyage",
     "custom",  # custom apis
 ]
@@ -2356,6 +2356,20 @@ def embedding(
             client=client,
             aembedding=aembedding,
         )
+    elif custom_llm_provider == "voyage":
+        api_key = api_key or litellm.api_key or get_secret("VOYAGE_API_KEY")
+        response = openai_chat_completions.embedding(
+            model=model,
+            input=input,
+            api_base=api_base,
+            api_key=api_key,
+            logging_obj=logging,
+            timeout=timeout,
+            model_response=EmbeddingResponse(),
+            optional_params=optional_params,
+            client=client,
+            aembedding=aembedding,
+        )
     else:
         args = locals()
         raise ValueError(f"No valid embedding model args passed in - {args}")
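For context (not part of the diff), a minimal usage sketch of the new branch. It assumes VOYAGE_API_KEY is exported so the get_secret("VOYAGE_API_KEY") fallback above can find a key, and that litellm.aembedding is the async entry point that sets aembedding=True before reaching embedding(); that name lives outside this hunk and is an assumption about the surrounding litellm API.

    # Sketch only, not part of the commit.
    import asyncio

    import litellm

    # Sync path: the "voyage/" model prefix routes into the new elif branch.
    response = litellm.embedding(
        model="voyage/voyage-01",
        input=["good morning from litellm"],
    )
    print(response)

    # Async path: aembedding=aembedding is forwarded to the OpenAI-compatible
    # client, so the async entry point (assumed: litellm.aembedding) should
    # reuse this same branch.
    async def main():
        aresponse = await litellm.aembedding(
            model="voyage/voyage-01",
            input=["good morning from litellm"],
        )
        print(aresponse)

    asyncio.run(main())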
@@ -318,6 +318,21 @@ def test_mistral_embeddings():

 # test_mistral_embeddings()

+
+def test_voyage_embeddings():
+    try:
+        litellm.set_verbose = True
+        response = litellm.embedding(
+            model="voyage/voyage-01",
+            input=["good morning from litellm"],
+        )
+        print(f"response: {response}")
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
+# test_voyage_embeddings()
+
 # test_sagemaker_embeddings()
 # def local_proxy_embeddings():
 #     litellm.set_verbose=True
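A small sketch (outside the diff) of running only the new test: the env var mirrors the get_secret("VOYAGE_API_KEY") fallback in embedding(); the placeholder key and the idea of selecting the test by keyword are assumptions, not part of this commit.

    # Sketch: run just test_voyage_embeddings via pytest's programmatic entry point.
    import os

    import pytest

    os.environ.setdefault("VOYAGE_API_KEY", "<your-voyage-key>")  # hypothetical placeholder
    pytest.main(["-k", "test_voyage_embeddings"])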
@@ -3751,6 +3751,10 @@ def get_llm_provider(
             # mistral is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.mistral.ai
             api_base = "https://api.mistral.ai/v1"
             dynamic_api_key = get_secret("MISTRAL_API_KEY")
+        elif custom_llm_provider == "voyage":
+            # voyage is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.voyageai.com/v1
+            api_base = "https://api.voyageai.com/v1"
+            dynamic_api_key = get_secret("VOYAGE_API_KEY")
         return model, custom_llm_provider, dynamic_api_key, api_base

     # check if api base is a known openai compatible endpoint
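As a final sketch outside the diff: with the provider_list addition and this branch, resolving a "voyage/"-prefixed model through get_llm_provider should surface the Voyage base URL and key. The import path, and the assumption that the prefix-splitting logic earlier in the function (not shown in this hunk) sets custom_llm_provider to "voyage", are mine rather than the commit's.

    # Sketch of the expected resolution for a voyage model, assuming the prefix
    # check earlier in get_llm_provider maps "voyage/..." to provider "voyage".
    from litellm.utils import get_llm_provider

    model, provider, dynamic_api_key, api_base = get_llm_provider(model="voyage/voyage-01")
    assert provider == "voyage"
    assert api_base == "https://api.voyageai.com/v1"
    # dynamic_api_key is read via get_secret("VOYAGE_API_KEY"), mirroring the mistral branch.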