From f3fff173d2845fea1b04c86c710e326e0e72a77b Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Sat, 25 Nov 2023 12:11:24 -0800
Subject: [PATCH] (docs) custom openai embedding model config

---
 docs/my-website/docs/simple_proxy.md | 24 ++++++++++++++++++++++--
 1 file changed, 22 insertions(+), 2 deletions(-)

diff --git a/docs/my-website/docs/simple_proxy.md b/docs/my-website/docs/simple_proxy.md
index c58cdce4a2..3758ba6484 100644
--- a/docs/my-website/docs/simple_proxy.md
+++ b/docs/my-website/docs/simple_proxy.md
@@ -541,7 +541,7 @@ environment_variables:
   REDIS_PASSWORD:
 ```

-### Config for Multiple Models - GPT-4, Claude-2, etc
+### Config for Multiple Models - GPT-4, Claude-2

 Here's how you can use multiple llms with one proxy `config.yaml`.

@@ -847,7 +847,27 @@ litellm_settings:
   allowed_fails: 3 # cooldown model if it fails > 1 call in a minute.
 ```

-### Model Alias
+
+### Config for Embedding Models - xorbitsai/inference
+
+Here's how you can serve an OpenAI-compatible embedding model behind one proxy `config.yaml`.
+See how [LiteLLM calls OpenAI Compatible Embedding models](https://docs.litellm.ai/docs/embedding/supported_embedding#openai-compatible-embedding-models).
+
+#### Config
+```yaml
+model_list:
+  - model_name: custom_embedding_model
+    litellm_params:
+      model: openai/custom_embedding  # the `openai/` prefix tells litellm it's openai-compatible
+      api_base: http://0.0.0.0:8000/
+  - model_name: custom_embedding_model
+    litellm_params:
+      model: openai/custom_embedding  # the `openai/` prefix tells litellm it's openai-compatible
+      api_base: http://0.0.0.0:8001/
+```
+
+
+### Config for Setting Model Aliases

 Set a model alias for your deployments.
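Once the proxy is started with the config added in this patch (for example `litellm --config config.yaml --port 4000`), clients call the proxy's OpenAI-compatible embeddings endpoint and reference the deployment by its `model_name`. Below is a minimal sketch using the `openai` Python client; the port, base URL, and API key are assumptions, so point them at wherever your proxy is actually listening.

```python
# Minimal sketch: request embeddings through the LiteLLM proxy.
# Assumes the proxy was started with the config above, e.g.:
#   litellm --config config.yaml --port 4000
# base_url and api_key are placeholders, not values from the patch.
from openai import OpenAI

client = OpenAI(
    api_key="anything",              # use your proxy key here if one is configured
    base_url="http://0.0.0.0:4000",  # wherever the LiteLLM proxy is listening
)

# "custom_embedding_model" matches the model_name entries in config.yaml;
# the proxy routes the request to one of the api_base deployments behind that name.
response = client.embeddings.create(
    model="custom_embedding_model",
    input=["hello world"],
)

print(len(response.data[0].embedding))  # dimensionality of the returned vector
```

Because both entries share the same `model_name`, the proxy can treat them as interchangeable deployments of `custom_embedding_model`; clients only ever reference that alias, never the individual `api_base` URLs.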