From 2bdc5a81ecf496bffa492a94e771aa33da4ca508 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Mon, 12 Feb 2024 16:55:34 -0800
Subject: [PATCH] (feat) ui - show user key when creating key

---
 litellm/proxy/proxy_config.yaml               | 66 ++++++-------------
 .../src/components/create_key_button.tsx      | 29 ++++----
 2 files changed, 35 insertions(+), 60 deletions(-)

diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index a549771d5..68097cd83 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -9,60 +9,32 @@ model_list:
       mode: chat
       max_tokens: 4096
       base_model: azure/gpt-4-1106-preview
+  - model_name: openai-gpt-3.5
+    litellm_params:
+      model: gpt-3.5-turbo
+      api_key: os.environ/OPENAI_API_KEY
+  - model_name: anthropic-claude-v2.1
+    litellm_params:
+      aws_region_name: eu-central-1
+      model: bedrock/anthropic.claude-v2:1
+      timeout: 300 # sets a 5 minute timeout
+      input_cost_per_token: 0.00000800
+      output_cost_per_token: 0.00002400
+  - model_name: anthropic-claude-v2
+    litellm_params:
+      model: bedrock/anthropic.claude-v2
+  - model_name: bedrock-cohere
+    litellm_params:
+      model: bedrock/cohere.command-text-v14
+      timeout: 0.0001
   - model_name: gpt-4
     litellm_params:
       model: azure/chatgpt-v-2
       api_base: https://openai-gpt-4-test-v-1.openai.azure.com/
       api_version: "2023-05-15"
       api_key: os.environ/AZURE_API_KEY # The `os.environ/` prefix tells litellm to read this from the env. See https://docs.litellm.ai/docs/simple_proxy#load-api-keys-from-vault
-  - model_name: gpt-vision
-    litellm_params:
-      model: azure/gpt-4-vision
-      base_url: https://gpt-4-vision-resource.openai.azure.com/openai/deployments/gpt-4-vision/extensions
-      api_key: os.environ/AZURE_VISION_API_KEY
-      api_version: "2023-09-01-preview"
-      dataSources:
-        - type: AzureComputerVision
-          parameters:
-            endpoint: os.environ/AZURE_VISION_ENHANCE_ENDPOINT
-            key: os.environ/AZURE_VISION_ENHANCE_KEY
-  - model_name: BEDROCK_GROUP
-    litellm_params:
-      model: bedrock/cohere.command-text-v14
-      timeout: 0.0001
-  - model_name: tg-ai
-    litellm_params:
-      model: together_ai/mistralai/Mistral-7B-Instruct-v0.1
-  - model_name: sagemaker
-    litellm_params:
-      model: sagemaker/berri-benchmarking-Llama-2-70b-chat-hf-4
-  - model_name: openai-gpt-3.5
-    litellm_params:
-      model: gpt-3.5-turbo
-      api_key: os.environ/OPENAI_API_KEY
     model_info:
-      mode: chat
-  - model_name: azure-cloudflare
-    litellm_params:
-      model: azure/chatgpt-v-2
-      api_base: https://gateway.ai.cloudflare.com/v1/0399b10e77ac6668c80404a5ff49eb37/litellm-test/azure-openai/openai-gpt-4-test-v-1
-      api_key: os.environ/AZURE_API_KEY
-      api_version: "2023-07-01-preview"
-  - model_name: azure-embedding-model
-    litellm_params:
-      model: azure/azure-embedding-model
-      api_base: os.environ/AZURE_API_BASE
-      api_key: os.environ/AZURE_API_KEY
-      api_version: "2023-07-01-preview"
-    model_info:
-      mode: embedding
-      base_model: text-embedding-ada-002
-  - model_name: text-embedding-ada-002
-    litellm_params:
-      model: text-embedding-ada-002
-      api_key: os.environ/OPENAI_API_KEY
-    model_info:
-      mode: embedding
+      base_model: azure/gpt-4
 litellm_settings:
   fallbacks: [{"openai-gpt-3.5": ["azure-gpt-3.5"]}]
   success_callback: ['langfuse']
diff --git a/ui/litellm-dashboard/src/components/create_key_button.tsx b/ui/litellm-dashboard/src/components/create_key_button.tsx
index fe093077a..13c96903e 100644
--- a/ui/litellm-dashboard/src/components/create_key_button.tsx
+++ b/ui/litellm-dashboard/src/components/create_key_button.tsx
@@ -14,6 +14,7 @@ interface CreateKeyProps {
   userRole: string | null;
   accessToken: string;
   data: any[] | null;
+  userModels: string[];
   setData: React.Dispatch<React.SetStateAction<any[] | null>>;
 }

@@ -22,6 +23,7 @@ const CreateKey: React.FC<CreateKeyProps> = ({
   userRole,
   accessToken,
   data,
+  userModels,
   setData,
 }) => {
   const [form] = Form.useForm();
@@ -42,14 +44,6 @@ const CreateKey: React.FC<CreateKeyProps> = ({
   const handleCreate = async (formValues: Record<string, any>) => {
     try {
       message.info("Making API Call");
-      // Check if "models" exists and is not an empty string
-      if (formValues.models && formValues.models.trim() !== '') {
-        // Format the "models" field as an array
-        formValues.models = formValues.models.split(',').map((model: string) => model.trim());
-      } else {
-        // If "models" is undefined or an empty string, set it to an empty array
-        formValues.models = [];
-      }
       setIsModalVisible(true);
       const response = await keyCreateCall(accessToken, userID, formValues);
       setData((prevData) => (prevData ? [...prevData, response] : [response])); // Check if prevData is null
@@ -90,13 +84,22 @@ const CreateKey: React.FC<CreateKeyProps> = ({
           <Form.Item label="Team ID" name="team_id">
             <Input placeholder="ai_team" />
           </Form.Item>
-          <Form.Item
-            label="Models (Comma Separated, eg: gpt-3.5-turbo,gpt-4)"
-            name="models"
-          >
-            <Input placeholder="gpt-4,gpt-3.5-turbo" />
+          <Form.Item
+            label="Models"
+            name="models"
+          >
+            <Select
+              mode="multiple"
+              placeholder="Select models"
+            >
+              {userModels.map((model) => (
+                <Option key={model} value={model}>
+                  {model}
+                </Option>
+              ))}
+            </Select>
           </Form.Item>
           <Form.Item label="Max Budget (USD)" name="max_budget">
             <InputNumber step={0.01} precision={2} width={200} />
           </Form.Item>
           <Form.Item label="Duration (eg: 30s, 30h, 30d)" name="duration">
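
Note on wiring: the component now requires a `userModels: string[]` prop, so whichever parent renders `CreateKey` must fetch the model list and pass it down; and because the "Models" field becomes a multi-select, `formValues.models` already arrives as an array, which is why the comma-splitting block in `handleCreate` is deleted. Below is a minimal sketch of that parent wiring; `KeysPage`, `fetchAvailableModels`, the `/models` response shape, and the import path are illustrative assumptions, not part of this patch.

// Illustrative parent wiring for the new `userModels` prop (not part of the patch).
// `fetchAvailableModels` and the `/models` response shape are assumptions.
import React, { useEffect, useState } from "react";
import CreateKey from "./create_key_button"; // assumed default export

async function fetchAvailableModels(accessToken: string): Promise<string[]> {
  // Assumes the proxy exposes an OpenAI-compatible /models endpoint.
  const res = await fetch("/models", {
    headers: { Authorization: `Bearer ${accessToken}` },
  });
  const body = await res.json();
  return body.data.map((m: { id: string }) => m.id);
}

const KeysPage: React.FC<{ accessToken: string; userRole: string | null }> = ({
  accessToken,
  userRole,
}) => {
  const [userModels, setUserModels] = useState<string[]>([]);
  const [data, setData] = useState<any[] | null>(null);

  useEffect(() => {
    // Load the model list once per token and hand it to CreateKey.
    fetchAvailableModels(accessToken).then(setUserModels).catch(console.error);
  }, [accessToken]);

  return (
    <CreateKey
      userRole={userRole}
      accessToken={accessToken}
      data={data}
      userModels={userModels}
      setData={setData}
      // any other props required by CreateKeyProps are omitted here
    />
  );
};

export default KeysPage;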