From 6e19bb87e25b687b4372ccc77b369c96080fc1a2 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 10 Jan 2024 22:16:02 +0530
Subject: [PATCH] (docs) proxy config - show how to set seed, temp on
 config.yaml

---
 docs/my-website/docs/proxy/configs.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/docs/my-website/docs/proxy/configs.md b/docs/my-website/docs/proxy/configs.md
index 9b1834027..b3fc93105 100644
--- a/docs/my-website/docs/proxy/configs.md
+++ b/docs/my-website/docs/proxy/configs.md
@@ -188,7 +188,7 @@ print(response)
 
 
 
-## Save Model-specific params (API Base, API Keys, Temperature, Headers etc.)
+## Save Model-specific params (API Base, API Keys, Temperature, Max Tokens, Seed, Headers etc.)
 You can use the config to save model-specific information like api_base, api_key, temperature, max_tokens, etc.
 
 [**All input params**](https://docs.litellm.ai/docs/completion/input#input-params-1)
@@ -202,11 +202,14 @@ model_list:
       api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
       api_version: "2023-05-15"
       azure_ad_token: eyJ0eXAiOiJ
+      seed: 12
+      max_tokens: 20
   - model_name: gpt-4-team2
     litellm_params:
       model: azure/gpt-4
       api_key: sk-123
       api_base: https://openai-gpt-4-test-v-2.openai.azure.com/
+      temperature: 0.2
   - model_name: mistral-7b
     litellm_params:
       model: ollama/mistral
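
For context on what this patch documents: once the proxy is started with this config (e.g. `litellm --config config.yaml`), any request referencing a `model_name` from `model_list` inherits that entry's `litellm_params`, so the client never passes `seed`, `temperature`, or `max_tokens` itself. A minimal sketch follows; the proxy address is an assumption (a common local default), and only `gpt-4-team2` is taken from the patch above.

```python
# Minimal sketch, not part of the patch: query the LiteLLM proxy after
# starting it with the config above. Requires the openai v1+ SDK.
import openai

client = openai.OpenAI(
    api_key="anything",              # placeholder; the proxy holds the real Azure key
    base_url="http://0.0.0.0:8000",  # assumed local proxy address; adjust to your deployment
)

# "gpt-4-team2" matches a model_name in model_list above, so its saved
# temperature: 0.2 is applied server-side without appearing in this request.
response = client.chat.completions.create(
    model="gpt-4-team2",
    messages=[{"role": "user", "content": "hello"}],
)
print(response)
```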