From 0b94e968e178cc31a5e28f9524ae568aa5837e15 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Fri, 20 Oct 2023 14:30:19 -0700
Subject: [PATCH] docs(multiple_deployments.md): docs on how to route between
 multiple deployments

---
 .../docs/completion/multiple_deployments.md   | 47 +++++++++++++++++++
 docs/my-website/sidebars.js                   |  1 +
 2 files changed, 48 insertions(+)
 create mode 100644 docs/my-website/docs/completion/multiple_deployments.md

diff --git a/docs/my-website/docs/completion/multiple_deployments.md b/docs/my-website/docs/completion/multiple_deployments.md
new file mode 100644
index 0000000000..7337906dbb
--- /dev/null
+++ b/docs/my-website/docs/completion/multiple_deployments.md
@@ -0,0 +1,47 @@
+# Multiple Deployments
+
+If you have multiple deployments of the same model, you can pass the list of deployments, and LiteLLM will return the first result.
+
+## Quick Start
+
+Multiple providers offer Mistral-7B-Instruct.
+
+Here's how you can use litellm to return the first result:
+
+```python
+from litellm import completion
+
+messages=[{"role": "user", "content": "Hey, how's it going?"}]
+
+## All your mistral deployments ##
+model_list = [{
+    "model_name": "mistral-7b-instruct",
+    "litellm_params": { # params for litellm completion/embedding call
+        "model": "replicate/mistralai/mistral-7b-instruct-v0.1:83b6a56e7c828e667f21fd596c338fd4f0039b46bcfa18d973e8e70e455fda70",
+        "api_key": "replicate_api_key",
+    }
+}, {
+    "model_name": "mistral-7b-instruct",
+    "litellm_params": { # params for litellm completion/embedding call
+        "model": "together_ai/mistralai/Mistral-7B-Instruct-v0.1",
+        "api_key": "togetherai_api_key",
+    }
+}, {
+    "model_name": "mistral-7b-instruct",
+    "litellm_params": { # params for litellm completion/embedding call
+        "model": "perplexity/mistral-7b-instruct",
+        "api_key": "perplexity_api_key"
+    }
+}, {
+    "model_name": "mistral-7b-instruct",
+    "litellm_params": {
+        "model": "deepinfra/mistralai/Mistral-7B-Instruct-v0.1",
+        "api_key": "deepinfra_api_key"
+    }
+}]
+
+## LiteLLM completion call ## returns first response
+response = completion(model="mistral-7b-instruct", messages=messages, model_list=model_list)
+
+print(response)
+```
\ No newline at end of file
diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js
index 8f113c5ae0..ac35c4cd08 100644
--- a/docs/my-website/sidebars.js
+++ b/docs/my-website/sidebars.js
@@ -37,6 +37,7 @@ const sidebars = {
         "completion/function_call",
         "completion/model_alias",
         "completion/reliable_completions",
+        "completion/multiple_deployments",
         "completion/config",
         "completion/batching",
         "completion/mock_requests",