diff --git a/litellm/main.py b/litellm/main.py
index f352f19c6..06b002ed0 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -2342,6 +2342,20 @@ def embedding(
             model_response=EmbeddingResponse(),
             print_verbose=print_verbose,
         )
+    elif custom_llm_provider == "mistral":
+        api_key = api_key or litellm.api_key or get_secret("MISTRAL_API_KEY")
+        response = openai_chat_completions.embedding(
+            model=model,
+            input=input,
+            api_base=api_base,
+            api_key=api_key,
+            logging_obj=logging,
+            timeout=timeout,
+            model_response=EmbeddingResponse(),
+            optional_params=optional_params,
+            client=client,
+            aembedding=aembedding,
+        )
     else:
         args = locals()
         raise ValueError(f"No valid embedding model args passed in - {args}")
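
For context, a minimal usage sketch of how this new branch would be exercised from the public API. The "mistral/mistral-embed" model string and the response access pattern are assumptions not shown in this diff; the branch itself only requires custom_llm_provider to resolve to "mistral" and reads MISTRAL_API_KEY via get_secret, as in the patch.

# Sketch only, not part of the diff above. Assumes the provider-prefixed model
# name "mistral/mistral-embed" routes custom_llm_provider to "mistral".
import os
import litellm

os.environ["MISTRAL_API_KEY"] = "..."  # placeholder; read by get_secret in the new branch

# Hits the new elif branch, which delegates to the OpenAI-compatible
# embedding handler (openai_chat_completions.embedding).
response = litellm.embedding(
    model="mistral/mistral-embed",
    input=["hello from litellm"],
)
print(response)  # an EmbeddingResponse, per the model_response passed in the patch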